从assets目录加载文件

时间:2012-03-12 23:48:37

标签: java android inputstream assets

西洛。

我正在为android 2.2编写一个java应用程序。

我正在尝试通过获取它的InputStream来加载文件。 据我所知,我应该将文件放在资产目录中。

我正在使用以下代码加载文件:

        InputStream audioFileStream=null;
        try {
            audioFileStream = getResources().getAssets().open("bounce.wav");
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

当我执行应用程序时,我收到以下消息:

W/System.err(339): java.io.IOException: BufferedInputStream is closed

我应该将文件放在assets目录中吗?我应该使用getResource吗?如果是这样的话?

这是我的第一个Android应用程序(除了基本的hello世界之外)所以请光临我:)

感谢

更多信息

我在Reading a simple text file发现了一个类似的问题,所以我尝试将文件放在res / raw,然后代码完成确实通过使用以下代码找到了该文件:

audioFileStream = getResources().openRawResource(R.raw.bounce);

但是当我执行应用程序时,它显示以下错误:

03-13 01:56:45.012: W/System.err(373): java.io.IOException: BufferedInputStream is closed

更新

  • bubble.wav是一个非常小的wav文件。
  • 我没有对BufferInputStream.close()
  • 进行任何调用

一般来说我想加载一个wav文件,以获取原始PCM并播放它(以及将来操作它)。

这是主视图类:

公共类Panel扩展了View {

    private Paint mPaint;

    public Panel(Context context) {
            super(context);
              mPaint = new Paint();
              mPaint.setDither(true);
              mPaint.setColor(0xFFFFFF00);
              mPaint.setStyle(Paint.Style.STROKE);
              mPaint.setStrokeJoin(Paint.Join.ROUND);
              mPaint.setStrokeCap(Paint.Cap.ROUND);
              mPaint.setStrokeWidth(3);
        }

        @Override
        public void onDraw(Canvas canvas) {
            Path path = new Path();
            path.moveTo(100, 100);
            path.lineTo(200, 200);
            canvas.drawPath(path,mPaint);
            InputStream audioFileStream=null;

                            audioFileStream = getResources().openRawResource(R.raw.bounce);

            ParseWav playWav = new ParseWav();
            playWav.loadWavFile(audioFileStream);
        }

这是ParseWav类:

// Modified from
// - android-tuner project at google code: http://code.google.com/p/androidtuner/
// - http://mindtherobot.com/blog/580/android-audio-play-a-wav-file-on-an-audiotrack/

public class ParseWav extends AudioAbstract {

           private static final String RIFF_HEADER = "RIFF";
            private static final String WAVE_HEADER = "WAVE";
            private final String FMT_HEADER = "fmt ";
            private final String DATA_HEADER = "data";

            private final int HEADER_SIZE = 44;

            private final String CHARSET = "ASCII";

            private String LOG_TAG = "PitchDetector";



            public void loadWavFile(InputStream audioFileStream) {

                    //http://developer.android.com/reference/java/io/FileInputStream.html
                    InputStream in = null;
                       try {

                         in = new BufferedInputStream(audioFileStream);
                       }
                        finally {
                         if (in != null) {
                           try {
                                            in.close();
                                    } catch (IOException e) {
                                            // TODO Auto-generated catch block
                                            e.printStackTrace();
                                    }
                         }
                       }

                    WavInfo wavInfo=null;
                    byte[] wavePcm=null;
                            try {
                                    wavInfo = this.readHeader(in);
                            } catch (IOException e) {
                                    // TODO Auto-generated catch block
                                    e.printStackTrace();
                            } catch (WavException e) {
                                    // TODO Auto-generated catch block
                                    e.printStackTrace();
                            }
                            try {
                                    wavePcm = this.readWavPcm(wavInfo, in);
                            } catch (IOException e) {
                                    // TODO Auto-generated catch block
                                    e.printStackTrace();
                            }
                    this.play(wavePcm);
            }

            private  void checkFormat(boolean bSuccess, String message) throws WavException {
            if (!bSuccess) {
                    Log.e(LOG_TAG, message);
                    throw new WavException();
            }
    }

    public WavInfo readHeader(InputStream wavStream) throws IOException, WavException {

            ByteBuffer buffer = ByteBuffer.allocate(HEADER_SIZE);
            buffer.order(ByteOrder.LITTLE_ENDIAN);

            wavStream.read(buffer.array(), buffer.arrayOffset(), buffer.capacity());

            buffer.rewind();
            buffer.position(buffer.position() + 20);
            int format = buffer.getShort();
            checkFormat(format == 1, "Unsupported encoding: " + format); // 1 means
                                                                                                                                            // Linear
                                                                                                                                            // PCM
            int channels = buffer.getShort();
            checkFormat(channels == 1 || channels == 2, "Unsupported channels: "
                            + channels);
            int rate = buffer.getInt();
            checkFormat(rate <= 48000 && rate >= 11025, "Unsupported rate: " + rate);
            buffer.position(buffer.position() + 6);
            int bits = buffer.getShort();
            checkFormat(bits == 16, "Unsupported bits: " + bits);
            int dataSize = 0;
            while (buffer.getInt() != 0x61746164) { // "data" marker
                    Log.d(LOG_TAG, "Skipping non-data chunk");
                    int size = buffer.getInt();
                    wavStream.skip(size);

                    buffer.rewind();
                    wavStream.read(buffer.array(), buffer.arrayOffset(), 8);
                    buffer.rewind();
            }
            dataSize = buffer.getInt();
            checkFormat(dataSize > 0, "wrong datasize: " + dataSize);

            return new WavInfo(rate, channels, dataSize);
    }

    public byte[] readWavPcm(WavInfo info, InputStream stream)
            throws IOException {
    byte[] data = new byte[info.dataSize];
    stream.read(data, 0, data.length);
    return data;
}

public void play(byte[] byteData)
{
    // Set and push to audio track..
    int intSize = android.media.AudioTrack.getMinBufferSize(44100, AudioFormat.CHANNEL_CONFIGURATION_MONO,
    AudioFormat.ENCODING_PCM_16BIT);
    AudioTrack at = new AudioTrack(AudioManager.STREAM_MUSIC, 44100, AudioFormat.CHANNEL_CONFIGURATION_MONO,
    AudioFormat.ENCODING_PCM_16BIT, intSize, AudioTrack.MODE_STREAM);
    if (at != null) {
            at.play();
            // Write the byte array to the track
            at.write(byteData, 0, byteData.length);
            at.stop();
            at.release();
    }
    else {
            Log.d("TCAudio", "audio track is not initialised ");
    }
    int a=1;

}

}

糟糕

我确实关闭了InputStream!我一下班就要测试一下!谢谢!

感谢

幼狮

0 个答案:

没有答案