2016-09-02 5 views
0

Dies ist meine Haupttätigkeit, dies ist der Code für Kamerafilter. Ich habe keinen Fehler gefunden. aber wenn ich diesen Code laufen lasse, stürzt er in meinem realen Gerät ab.kann irgendeine Hilfe darin bitte wo ist der Fehler und was ist der Fehler gehört. Ich füge den Log-Cat-Fehler als ein Bild an. Sie finden eslogcat Fehler kann den Fehler nicht finden

public class MainActivity extends AppCompatActivity { 
    private static final int REQUEST_CAMERA_PERSIMMISON = 101; 
    private CameraRenderer renderer; 
    private TextureView textureView; 
    private int filterId = R.id.filter0; 
    /** 
    * ATTENTION: This was auto-generated to implement the App Indexing API. 
    * See https://g.co/AppIndexing/AndroidStudio for more information. 
    */ 
    private GoogleApiClient client; 


    @Override 
    protected void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 
     setContentView(R.layout.activity_main); 
     setTitle("Original"); 

     if (ContextCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager 
       .PERMISSION_GRANTED) { 
      if (ActivityCompat.shouldShowRequestPermissionRationale(this, Manifest.permission.CAMERA)) { 
       Toast.makeText(this, "Camera acess is required.", Toast.LENGTH_SHORT).show(); 
      } else { 
       ActivityCompat.requestPermissions(this, new String[]{Manifest.permission.CAMERA}, REQUEST_CAMERA_PERSIMMISON); 
      } 


     } else { 
      setupCameraPreviewView(); 
     } 

     // ATTENTION: This was auto-generated to implement the App Indexing API. 
     // See https://g.co/AppIndexing/AndroidStudio for more information. 
     client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build(); 
    } 

    void setupCameraPreviewView() { 
     renderer = new CameraRenderer(this); 
     textureView = (TextureView) findViewById(R.id.textureView); 
     assert textureView != null; 
     textureView.setSurfaceTextureListener(renderer); 
     textureView.setOnTouchListener(new View.OnTouchListener() { 
      @Override 
      public boolean onTouch(View v, MotionEvent event) { 

       switch (event.getAction()) { 
        case MotionEvent.ACTION_DOWN: 
         renderer.setSelectedFilter(R.id.filter0); 
         break; 
        case MotionEvent.ACTION_UP: 
        case MotionEvent.ACTION_CANCEL: 
         renderer.setSelectedFilter(filterId); 
         break; 
       } 
       return true; 
      } 
     }); 


     textureView.addOnLayoutChangeListener(new View.OnLayoutChangeListener() { 
      @Override 
      public void onLayoutChange(View v, int left, int top, int right, int bottom, int oldLeft, int oldTop, int oldRight, int oldBottom) { 
       renderer.onSurfaceTextureSizeChanged(null, v.getWidth(), v.getHeight()); 
      } 
     }); 

    } 

    public boolean onCreateOptionsMenu(Menu menu) { 
     getMenuInflater().inflate(R.menu.filter, menu); 
     return true; 
    } 

    public boolean onOptionsItemSelected(MenuItem item) { 
     filterId = item.getItemId(); 
     if (filterId == R.id.capture) { 
      Toast.makeText(this, capture() ? "The capture has been saved to your sdcard root path." : "Saved failed", Toast.LENGTH_SHORT).show(); 
      return true; 
     } 
     setTitle(item.getTitle()); 

     if (renderer != null) 
      renderer.setSelectedFilter(filterId); 
     return true; 
    } 


    private boolean capture() { 
     String mPath = genSaveFileName(getTitle().toString() + "_", ".png"); 
     File imageFile = new File(mPath); 
     if (imageFile.exists()) { 
      imageFile.delete(); 
     } 
     Bitmap bitmap = textureView.getBitmap(); 
     OutputStream outputStream = null; 

     try { 
      outputStream = new FileOutputStream(imageFile); 
      bitmap.compress(Bitmap.CompressFormat.PNG, 90, outputStream); 
      outputStream.flush(); 
      outputStream.close(); 
     } catch (FileNotFoundException e) { 
      e.printStackTrace(); 
      return false; 
     } catch (IOException e) { 
      e.printStackTrace(); 
      return false; 
     } 
     return true; 
    } 

    private String genSaveFileName(String prefix, String suffix) { 
     Date date = new Date(); 
     SimpleDateFormat dateFormat1 = new SimpleDateFormat("yyyyMMdd_hhmmss"); 
     String timeString = dateFormat1.format(date); 
     String externalPath = Environment.getExternalStorageDirectory().toString(); 
     return externalPath + "/" + prefix + timeString + suffix; 
    } 

    @Override 
    public void onStart() { 
     super.onStart(); 

     // ATTENTION: This was auto-generated to implement the App Indexing API. 
     // See https://g.co/AppIndexing/AndroidStudio for more information. 
     client.connect(); 
     Action viewAction = Action.newAction(
       Action.TYPE_VIEW, // TODO: choose an action type. 
       "Main Page", // TODO: Define a title for the content shown. 
       // TODO: If you have web page content that matches this app activity's content, 
       // make sure this auto-generated web page URL is correct. 
       // Otherwise, set the URL to null. 
       Uri.parse("http://host/path"), 
       // TODO: Make sure this auto-generated app URL is correct. 
       Uri.parse("android-app://giri.com.camerafilter/http/host/path") 
     ); 
     AppIndex.AppIndexApi.start(client, viewAction); 
    } 

    @Override 
    public void onStop() { 
     super.onStop(); 

     // ATTENTION: This was auto-generated to implement the App Indexing API. 
     // See https://g.co/AppIndexing/AndroidStudio for more information. 
     Action viewAction = Action.newAction(
       Action.TYPE_VIEW, // TODO: choose an action type. 
       "Main Page", // TODO: Define a title for the content shown. 
       // TODO: If you have web page content that matches this app activity's content, 
       // make sure this auto-generated web page URL is correct. 
       // Otherwise, set the URL to null. 
       Uri.parse("http://host/path"), 
       // TODO: Make sure this auto-generated app URL is correct. 
       Uri.parse("android-app://giri.com.camerafilter/http/host/path") 
     ); 
     AppIndex.AppIndexApi.end(client, viewAction); 
     client.disconnect(); 
    } 
} 







<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" 
    xmlns:tools="http://schemas.android.com/tools" 
    android:layout_width="match_parent" 
    android:layout_height="match_parent" 
    > 

    <TextView 
     android:id="@+id/textureView" 
     android:layout_width="wrap_content" 
     android:layout_height="wrap_content" 
     /> 
</RelativeLayout> 

Das ist mein CameraRenderer.java

public class CameraRenderer extends Thread implements TextureView.SurfaceTextureListener { 
    private static final String TAG = "CameraRenderer"; 
    private static final int EGL_OPENGL_ES2_BIT = 4; 
    private static final int EGL_CONTEXT_CLIENT_VERSION = 0x3098; 
    private static final int DRAW_INTERVAL = 1000/30; 

    private Context context; 
    private SurfaceTexture surfaceTexture; 
    private int gwidth, gheight; 

    private EGLDisplay eglDisplay; 
    private EGLSurface eglSurface; 
    private EGLContext eglContext; 
    private EGL10 egl10; 

    private Camera camera; 
    private SurfaceTexture cameraSurfaceTexture; 
    private int cameraTextureId; 
    private CameraFilter selectedFilter; 
    private SparseArray<CameraFilter> cameraFilterMap = new SparseArray<>(); 

    public CameraRenderer(Context context) { 
     this.context = context; 
    } 

    @Override 
    public void onSurfaceTextureUpdated(SurfaceTexture surface) { 
    } 

    @Override 
    public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { 
     GLES20.glViewport(0, 0, gwidth = width, gheight = height); 
    } 

    @Override 
    public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { 
     if (camera != null) { 
      camera.stopPreview(); 
      camera.release(); 
     } 
     interrupt(); 
     CameraFilter.release(); 

     return true; 
    } 

    @Override 
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { 
     if (isAlive()) { 
      interrupt(); 
     } 

     surfaceTexture = surface; 
     GLES20.glViewport(0, 0, gwidth = width, gheight = height); 

     // Open camera 
     Pair<Camera.CameraInfo, Integer> backCamera = getBackCamera(); 
     final int backCameraId = backCamera.second; 
     camera = Camera.open(backCameraId); 

     // Start rendering 
     start(); 
    } 

    public void setSelectedFilter(int id) { 
     selectedFilter = cameraFilterMap.get(id); 
     selectedFilter.onAttach(); 
    } 

    @Override 
    public void run() { 
     initGL(surfaceTexture); 

     // Setup camera filters map 
     cameraFilterMap.append(R.id.filter0, new OriginalFilter(context)); 
     cameraFilterMap.append(R.id.filter1, new EdgeDetectionFilter(context)); 
     cameraFilterMap.append(R.id.filter2, new PixelizeFilter(context)); 
     cameraFilterMap.append(R.id.filter3, new EMInterferenceFilter(context)); 
     cameraFilterMap.append(R.id.filter4, new TrianglesMosaicFilter(context)); 
     cameraFilterMap.append(R.id.filter5, new LegofiedFilter(context)); 
     cameraFilterMap.append(R.id.filter6, new TileMosaicFilter(context)); 
     cameraFilterMap.append(R.id.filter7, new BlueorangeFilter(context)); 
     cameraFilterMap.append(R.id.filter8, new ChromaticAberrationFilter(context)); 
     cameraFilterMap.append(R.id.filter9, new BasicDeformFilter(context)); 
     cameraFilterMap.append(R.id.filter10, new ContrastFilter(context)); 
     cameraFilterMap.append(R.id.filter11, new NoiseWarpFilter(context)); 
     cameraFilterMap.append(R.id.filter12, new RefractionFilter(context)); 
     cameraFilterMap.append(R.id.filter13, new MappingFilter(context)); 
     cameraFilterMap.append(R.id.filter14, new CrosshatchFilter(context)); 
     cameraFilterMap.append(R.id.filter15, new LichtensteinEsqueFilter(context)); 
     cameraFilterMap.append(R.id.filter16, new AsciiArtFilter(context)); 
     cameraFilterMap.append(R.id.filter17, new MoneyFilter(context)); 
     cameraFilterMap.append(R.id.filter18, new CrackedFilter(context)); 
     cameraFilterMap.append(R.id.filter19, new PolygonizationFilter(context)); 
     cameraFilterMap.append(R.id.filter20, new JFAVoronoiFilter(context)); 
     setSelectedFilter(R.id.filter0); 

     // Create texture for camera preview 
     cameraTextureId = MyGLUtils.genTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES); 
     cameraSurfaceTexture = new SurfaceTexture(cameraTextureId); 

     // Start camera preview 
     try { 
      camera.setPreviewTexture(cameraSurfaceTexture); 
      camera.startPreview(); 
     } catch (IOException ioe) { 
      // Something bad happened 
     } 

     // Render loop 
     while (!Thread.currentThread().isInterrupted()) { 
      try { 
       GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 

       // Update the camera preview texture 
       synchronized (this) { 
        cameraSurfaceTexture.updateTexImage(); 
       } 

       // Draw camera preview 
       selectedFilter.draw(cameraTextureId, gwidth, gheight); 

       // Flush 
       GLES20.glFlush(); 
       egl10.eglSwapBuffers(eglDisplay, eglSurface); 

       Thread.sleep(DRAW_INTERVAL); 

      } catch (InterruptedException e) { 
       Thread.currentThread().interrupt(); 
      } 
     } 

     cameraSurfaceTexture.release(); 
     GLES20.glDeleteTextures(1, new int[]{cameraTextureId}, 0); 
    } 

    private void initGL(SurfaceTexture texture) { 
     egl10 = (EGL10) EGLContext.getEGL(); 

     eglDisplay = egl10.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY); 
     if (eglDisplay == EGL10.EGL_NO_DISPLAY) { 
      throw new RuntimeException("eglGetDisplay failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError())); 
     } 

     int[] version = new int[2]; 
     if (!egl10.eglInitialize(eglDisplay, version)) { 
      throw new RuntimeException("eglInitialize failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError())); 
     } 

     int[] configsCount = new int[1]; 
     EGLConfig[] configs = new EGLConfig[1]; 
     int[] configSpec = { 
       EGL10.EGL_RENDERABLE_TYPE, 
       EGL_OPENGL_ES2_BIT, 
       EGL10.EGL_RED_SIZE, 8, 
       EGL10.EGL_GREEN_SIZE, 8, 
       EGL10.EGL_BLUE_SIZE, 8, 
       EGL10.EGL_ALPHA_SIZE, 8, 
       EGL10.EGL_DEPTH_SIZE, 0, 
       EGL10.EGL_STENCIL_SIZE, 0, 
       EGL10.EGL_NONE 
     }; 

     EGLConfig eglConfig = null; 
     if (!egl10.eglChooseConfig(eglDisplay, configSpec, configs, 1, configsCount)) { 
      throw new IllegalArgumentException("eglChooseConfig failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError())); 
     } else if (configsCount[0] > 0) { 
      eglConfig = configs[0]; 
     } 
     if (eglConfig == null) { 
      throw new RuntimeException("eglConfig not initialized"); 
     } 

     int[] attrib_list = {EGL_CONTEXT_CLIENT_VERSION, 2, EGL10.EGL_NONE}; 
     eglContext = egl10.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, attrib_list); 
     eglSurface = egl10.eglCreateWindowSurface(eglDisplay, eglConfig, texture, null); 

     if (eglSurface == null || eglSurface == EGL10.EGL_NO_SURFACE) { 
      int error = egl10.eglGetError(); 
      if (error == EGL10.EGL_BAD_NATIVE_WINDOW) { 
       Log.e(TAG, "eglCreateWindowSurface returned EGL10.EGL_BAD_NATIVE_WINDOW"); 
       return; 
      } 
      throw new RuntimeException("eglCreateWindowSurface failed " + android.opengl.GLUtils.getEGLErrorString(error)); 
     } 

     if (!egl10.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) { 
      throw new RuntimeException("eglMakeCurrent failed " + android.opengl.GLUtils.getEGLErrorString(egl10.eglGetError())); 
     } 
    } 

    private Pair<Camera.CameraInfo, Integer> getBackCamera() { 
     Camera.CameraInfo cameraInfo = new Camera.CameraInfo(); 
     final int numberOfCameras = Camera.getNumberOfCameras(); 

     for (int i = 0; i < numberOfCameras; ++i) { 
      Camera.getCameraInfo(i, cameraInfo); 
      if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) { 
       return new Pair<>(cameraInfo, i); 
      } 
     } 
     return null; 
    } 
} 

**This is my CameraFilter.java file** 

public abstract class CameraFilter { 
    static final float SQUARE_COORDS[] = { 
      1.0f, -1.0f, 
      -1.0f, -1.0f, 
      1.0f, 1.0f, 
      -1.0f, 1.0f, 
    }; 
    static final float TEXTURE_COORDS[] = { 
      1.0f, 0.0f, 
      0.0f, 0.0f, 
      1.0f, 1.0f, 
      0.0f, 1.0f, 
    }; 
    static FloatBuffer VERTEX_BUF, TEXTURE_COORD_BUF; 
    static int PROGRAM = 0; 

    private static final int BUF_ACTIVE_TEX_UNIT = GLES20.GL_TEXTURE8; 
    private static RenderBuffer CAMERA_RENDER_BUF; 

    private static final float ROATED_TEXTURE_COORDS[] = { 
      1.0f, 0.0f, 
      1.0f, 1.0f, 
      0.0f, 0.0f, 
      0.0f, 1.0f, 
    }; 
    private static FloatBuffer ROATED_TEXTURE_COORD_BUF; 

    final long START_TIME = System.currentTimeMillis(); 
    int iFrame = 0; 

    public CameraFilter(Context context) { 
     // Setup default Buffers 
     if (VERTEX_BUF == null) { 
      VERTEX_BUF = ByteBuffer.allocateDirect(SQUARE_COORDS.length * 4) 
        .order(ByteOrder.nativeOrder()).asFloatBuffer(); 
      VERTEX_BUF.put(SQUARE_COORDS); 
      VERTEX_BUF.position(0); 
     } 

     if (TEXTURE_COORD_BUF == null) { 
      TEXTURE_COORD_BUF = ByteBuffer.allocateDirect(TEXTURE_COORDS.length * 4) 
        .order(ByteOrder.nativeOrder()).asFloatBuffer(); 
      TEXTURE_COORD_BUF.put(TEXTURE_COORDS); 
      TEXTURE_COORD_BUF.position(0); 
     } 

     if (ROATED_TEXTURE_COORD_BUF == null) { 
      ROATED_TEXTURE_COORD_BUF = ByteBuffer.allocateDirect(ROATED_TEXTURE_COORDS.length * 4) 
        .order(ByteOrder.nativeOrder()).asFloatBuffer(); 
      ROATED_TEXTURE_COORD_BUF.put(ROATED_TEXTURE_COORDS); 
      ROATED_TEXTURE_COORD_BUF.position(0); 
     } 

     if (PROGRAM == 0) { 
      PROGRAM = MyGLUtils.buildProgram(context, R.raw.vertext, R.raw.original_rtt); 
     } 
    } 

    @CallSuper 
    public void onAttach() { 
     iFrame = 0; 
    } 

    final public void draw(int cameraTexId, int canvasWidth, int canvasHeight) { 
     // TODO move? 
     // Create camera render buffer 
     if (CAMERA_RENDER_BUF == null || 
       CAMERA_RENDER_BUF.getWidth() != canvasWidth || 
       CAMERA_RENDER_BUF.getHeight() != canvasHeight) { 
      CAMERA_RENDER_BUF = new RenderBuffer(canvasWidth, canvasHeight, BUF_ACTIVE_TEX_UNIT); 
     } 

     // Use shaders 
     GLES20.glUseProgram(PROGRAM); 

     int iChannel0Location = GLES20.glGetUniformLocation(PROGRAM, "iChannel0"); 
     GLES20.glActiveTexture(GLES20.GL_TEXTURE0); 
     GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, cameraTexId); 
     GLES20.glUniform1i(iChannel0Location, 0); 

     int vPositionLocation = GLES20.glGetAttribLocation(PROGRAM, "vPosition"); 
     GLES20.glEnableVertexAttribArray(vPositionLocation); 
     GLES20.glVertexAttribPointer(vPositionLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, VERTEX_BUF); 

     int vTexCoordLocation = GLES20.glGetAttribLocation(PROGRAM, "vTexCoord"); 
     GLES20.glEnableVertexAttribArray(vTexCoordLocation); 
     GLES20.glVertexAttribPointer(vTexCoordLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, ROATED_TEXTURE_COORD_BUF); 

     // Render to texture 
     CAMERA_RENDER_BUF.bind(); 
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 
     GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4); 
     CAMERA_RENDER_BUF.unbind(); 
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 

     onDraw(CAMERA_RENDER_BUF.getTexId(), canvasWidth, canvasHeight); 

     iFrame ++; 
    } 

    abstract void onDraw(int cameraTexId, int canvasWidth, int canvasHeight); 

    void setupShaderInputs(int program, int[] iResolution, int[] iChannels, int[][] iChannelResolutions) { 
     setupShaderInputs(program, VERTEX_BUF, TEXTURE_COORD_BUF, iResolution, iChannels, iChannelResolutions); 
    } 

    void setupShaderInputs(int program, FloatBuffer vertex, FloatBuffer textureCoord, int[] iResolution, int[] iChannels, int[][] iChannelResolutions) { 
     GLES20.glUseProgram(program); 

     int iResolutionLocation = GLES20.glGetUniformLocation(program, "iResolution"); 
     GLES20.glUniform3fv(iResolutionLocation, 1, 
       FloatBuffer.wrap(new float[]{(float) iResolution[0], (float) iResolution[1], 1.0f})); 

     float time = ((float) (System.currentTimeMillis() - START_TIME))/1000.0f; 
     int iGlobalTimeLocation = GLES20.glGetUniformLocation(program, "iGlobalTime"); 
     GLES20.glUniform1f(iGlobalTimeLocation, time); 

     int iFrameLocation = GLES20.glGetUniformLocation(program, "iFrame"); 
     GLES20.glUniform1i(iFrameLocation, iFrame); 

     int vPositionLocation = GLES20.glGetAttribLocation(program, "vPosition"); 
     GLES20.glEnableVertexAttribArray(vPositionLocation); 
     GLES20.glVertexAttribPointer(vPositionLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, vertex); 

     int vTexCoordLocation = GLES20.glGetAttribLocation(program, "vTexCoord"); 
     GLES20.glEnableVertexAttribArray(vTexCoordLocation); 
     GLES20.glVertexAttribPointer(vTexCoordLocation, 2, GLES20.GL_FLOAT, false, 4 * 2, textureCoord); 

     for (int i = 0; i < iChannels.length; i ++) { 
      int sTextureLocation = GLES20.glGetUniformLocation(program, "iChannel" + i); 
      GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + i); 
      GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, iChannels[i]); 
      GLES20.glUniform1i(sTextureLocation, i); 
     } 

     float _iChannelResolutions[] = new float[iChannelResolutions.length * 3]; 
     for (int i = 0; i < iChannelResolutions.length; i++) { 
      _iChannelResolutions[i*3] = iChannelResolutions[i][0]; 
      _iChannelResolutions[i*3 + 1] = iChannelResolutions[i][1]; 
      _iChannelResolutions[i*3 + 2] = 1.0f; 
     } 

     int iChannelResolutionLocation = GLES20.glGetUniformLocation(program, "iChannelResolution"); 
     GLES20.glUniform3fv(iChannelResolutionLocation, 
       _iChannelResolutions.length, FloatBuffer.wrap(_iChannelResolutions)); 
    } 

    public static void release() { 
     PROGRAM = 0; 
     CAMERA_RENDER_BUF = null; 
    } 
} 

immer noch ein Fehler bekommen

E/AndroidRuntime: FATAL EXCEPTION: Thread-1759 Process: giri.com.camerafilter, PID: 21244 
java.lang.NullPointerException: Attempt to invoke virtual method 'void giri.com.camerafilter.RenderBuffer.unbind()' on a null object reference 
at giri.com.camerafilter.filter.CameraFilter.draw(CameraFilter.java:126) 
at giri.com.camerafilter.CameraRenderer.run(CameraRenderer.java:165) 
+0

Post FULL logcat hier – Shaishav

+0

i hinzugefügt wurden sie –

+0

bitte finden @shaishav Bild für logcat Nicht verwenden. Kopiere den Text und poste ihn hier. Die entscheidende Information wird in beiden Bildern geschnitten – Shaishav

Antwort

0

wie folgt tun Sie sind ein ClassCastException, weil Sie bekommen Ich versuche, TextView zu TextureView zu werfen. Ändern:

<TextView 
     android:id="@+id/textureView" 
     android:layout_width="wrap_content" 
     android:layout_height="wrap_content" 
     /> 

-<TextureView...>:

<TextureView 
     android:id="@+id/textureView" 
     android:layout_width="wrap_content" 
     android:layout_height="wrap_content" 
     /> 
+0

Noch immer bekomme ich diesen Fehler. Ich habe jetzt hinzugefügt. Überprüfen Sie bitte das –

0

Es classCastException ist, die besagt, dass Sie sind casting ein class zu einem anderen class welche verschiedene Signaturen haben. Sie haben AppCompatTextview in Ihrem xml verwendet und Sie sind Casting es TextView

könnten Sie diesen

AppCompatTextView textView = (TextView) findViewById(R.id.your_id); 

tun und Sie sollten

AppCompatTextView textView = (AppCompatTextView) findViewById(R.id.your_id); 
+0

ich habe hinzugefügt, aber immer noch zeigt nicht –

+0

@Mahesh Babu was ist jetzt Fehler? – Nikhil

+0

Kann das Symbol "AppCompatTextview" –

Verwandte Themen