需要在ARCORE中播放视频

2023-12-30

正如我们在 ARCore 中所知,我们可以通过单击水平面来检测 3D 对象。而不是 3d 对象,当用户要单击平面时,我需要显示视频。外观和感觉应该与显示的 3D 对象相同。视频应以预览模式显示,而不是 3D 对象。

在 ARCore 中,目前使用的是一种带有 SurfaceView 的相对布局(RelativeLayout)。因此,为了显示视频,我使用 SurfaceView 并为其附加 MediaPlayer。

/**
 * Sets up the GL program, the external (OES) video texture and the
 * MediaPlayer surface.  Must run on the GL thread (needs the EGLContext).
 *
 * Playback is started exactly once, from the OnPreparedListener.
 */
public void onsurfacecreatedvideo(){
    // Compile/link the video shader program; createProgram logs failures.
    mProgram = createProgram(mVertexShader, mFragmentShader);
    if (mProgram == 0) {
        return;
    }
    // Look up the attribute/uniform handles the draw call needs.
    maPositionHandle = GLES20.glGetAttribLocation(mProgram, "aPosition");
    checkGlError("glGetAttribLocation aPosition");
    if (maPositionHandle == -1) {
        throw new RuntimeException("Could not get attrib location for aPosition");
    }
    maTextureHandle = GLES20.glGetAttribLocation(mProgram, "aTextureCoord");
    checkGlError("glGetAttribLocation aTextureCoord");
    if (maTextureHandle == -1) {
        throw new RuntimeException("Could not get attrib location for aTextureCoord");
    }

    muMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
    checkGlError("glGetUniformLocation uMVPMatrix");
    if (muMVPMatrixHandle == -1) {
        throw new RuntimeException("Could not get attrib location for uMVPMatrix");
    }

    muSTMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uSTMatrix");
    checkGlError("glGetUniformLocation uSTMatrix");
    if (muSTMatrixHandle == -1) {
        throw new RuntimeException("Could not get attrib location for uSTMatrix");
    }

    // Create the external texture that will receive decoded video frames.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);

    mTextureID = textures[0];
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);
    checkGlError("glBindTexture mTextureID");

    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
            GLES20.GL_NEAREST);
    GLES20.glTexParameterf(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
            GLES20.GL_LINEAR);

    /*
     * Create the SurfaceTexture that will feed this textureID,
     * and pass it to the MediaPlayer.  The local Surface wrapper can be
     * released right away: MediaPlayer keeps its own reference.
     */
    mSurface = new SurfaceTexture(mTextureID);
    mSurface.setOnFrameAvailableListener(this);

    Surface surface = new Surface(mSurface);
    mMediaPlayer.setSurface(surface);
    mMediaPlayer.setScreenOnWhilePlaying(true);
    surface.release();

    // Start playback from onPrepared only (see bug-fix note below).
    mMediaPlayer.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
        @Override
        public void onPrepared(MediaPlayer mp) {
            Log.i(TAG,"ONPREPArED abhilash");
            setVideoSize();
            mp.start();
        }
    });
    try {
        mMediaPlayer.prepare();
    } catch (IOException t) {
        // BUG FIX: log the throwable so the failure is diagnosable, and
        // stop here — there is nothing to play if prepare() failed.
        Log.e(TAG, "media player prepare failed", t);
        return;
    }

    synchronized(this) {
        updateSurface = false;
    }

    // BUG FIX: the original code called mMediaPlayer.start() here as well.
    // prepare() is synchronous, so onPrepared above has already started
    // playback; the second start() was redundant and has been removed.
}

/**
 * Renders the current video frame onto the full-screen quad.
 * Called from the GL thread once per frame.
 */
void ondrawvideo(){
    // Latch the newest decoded frame into the OES texture.  updateSurface
    // is set by the SurfaceTexture frame-available callback on another
    // thread, hence the monitor.
    synchronized(this) {
        if (updateSurface) {
            mSurface.updateTexImage();
            mSurface.getTransformMatrix(mSTMatrix);
            updateSurface = false;
        }
    }

    /////////////
    // NOTE(review): clearing color + depth here wipes everything rendered
    // earlier in this frame (camera background, planes, objects) — this is
    // likely why the video fills the screen instead of appearing only on
    // the anchored quad.  Confirm whether this clear is intended.
    GLES20.glClearColor(0.0f, 1.0f, 0.0f, 1.0f);
    GLES20.glClear( GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);


    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");

    // Bind the external video texture to unit 0 (sampled by the shader).
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);



    // Position attribute: 3 floats per vertex, interleaved buffer.
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");

    // Texture-coordinate attribute.
    // NOTE(review): size 3 for what is presumably a 2-component UV — this
    // matches the old ApiDemos sample, but verify the vertex buffer really
    // has enough floats at the UV offset of the last vertex.
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");

    // Identity MVP: the quad is drawn in clip space (covers the viewport).
    Matrix.setIdentityM(mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    // ST matrix from SurfaceTexture maps UVs into the video frame.
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");
    // NOTE(review): glFinish stalls the CPU until the GPU drains; usually
    // unnecessary inside a GLSurfaceView renderer — confirm it is needed.
    GLES20.glFinish();

}


        // Visualize planes.
        mPlaneRenderer.drawPlanes(mSession.getAllPlanes(), frame.getPose(), projmtx);


        // Visualize anchors created by touch.
        float scaleFactor = 1.0f;
        for (PlaneAttachment planeAttachment : mTouches) {
            ondrawvideo();
            if (!planeAttachment.isTracking()) {
                continue;
            }


            // Get the current combined pose of an Anchor and Plane in world space. The Anchor
            // and Plane poses are updated during calls to session.update() as ARCore refines
            // its estimate of the world.
            planeAttachment.getPose().toMatrix(mAnchorMatrix, 0);

            // Update and draw the model and its shadow.
            mVirtualObject.updateModelMatrix(mAnchorMatrix, scaleFactor);
            mVirtualObjectShadow.updateModelMatrix(mAnchorMatrix, scaleFactor);
            mVirtualObject.draw(viewmtx, projmtx, lightIntensity);
            mVirtualObjectShadow.draw(viewmtx, projmtx, lightIntensity);
        }

    } catch (Throwable t) {
        // Avoid crashing the application due to unhandled exceptions.
        Log.e(TAG, "Exception on the OpenGL thread", t);
    }
}

目前我的输出是这样的。当我单击平面表面时,它显示如下:

在此输入图像描述 https://i.stack.imgur.com/JPEFn.png

正如你所看到的,在下面的图片中,我需要像这样实现它。我只是标记在这个特定的 bugdroid 图像中应该播放视频,视频不应超过全屏;它应该只像 bugdroid 图像大小一样显示:

在此输入图像描述 https://i.stack.imgur.com/UVhdo.jpg


我通过创建一个名为 MovieClipRenderer 的新类来实现此目的,该类以 HelloAR 示例中的 ObjectRenderer 类为模型。这将创建一个四边形几何体并在四边形中渲染来自媒体播放器的纹理。四边形固定在一个平面上,因此当用户环顾四周时它不会移动。

为了进行测试,我使用了以下资源的电影:https://www.videvo.net/video/chicken-on-green-screen/3435/ https://www.videvo.net/video/chicken-on-green-screen/3435/并将其添加到src/main/assets

然后我将渲染器的成员变量添加到HelloArActivity

  private final MovieClipRenderer mMovieClipRenderer = new MovieClipRenderer();

在 onSurfaceCreated() 中,我把这个渲染器和其他渲染器一起进行了初始化:

 mMovieClipRenderer.createOnGlThread();

为了尝试一下,我让平面(plane)上的第一次点击创建电影锚点,将命中测试(hit test)代码稍微更改为:

// First tap on a plane anchors the movie; subsequent taps fall through to
// the normal 3D-object anchor list.
if (mMovieAnchor == null) {
    mMovieAnchor = hit.createAnchor();
} else {
    mAnchors.add(hit.createAnchor());
}

然后在 onDrawFrame() 的底部,我检查锚点并开始播放:

    // Once a movie anchor exists, lazily start playback and redraw the
    // quad at the anchor's (possibly refined) pose every frame.
    if (mMovieAnchor != null) {
        // Draw chickens!
        if (!mMovieClipRenderer.isStarted()) {
            mMovieClipRenderer.play("chicken.mp4", this);
        }
        mMovieAnchor.getPose().toMatrix(mAnchorMatrix,0);
        // 0.25 scales the renderer's 2x2 quad down to 0.5m on a side.
        mMovieClipRenderer.update(mAnchorMatrix, 0.25f);
        mMovieClipRenderer.draw(mMovieAnchor.getPose(), viewmtx, projmtx);
    }

渲染类相当长,但是是非常标准的 GLES 代码,用于创建 OES 纹理并初始化视频播放器、创建四边形的顶点并加载绘制 OES 纹理的片段着色器。

/**
 * Renders a movie clip with a green screen aware shader.
 * <p>
 * Copyright 2018 Google LLC
 * <p>
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
public class MovieClipRenderer implements
     SurfaceTexture.OnFrameAvailableListener {
  private static final String TAG = MovieClipRenderer.class.getSimpleName();

  // Quad geometry: 4 vertices drawn as a triangle strip, spanning
  // [-1, 1] x [-1, 1] in the XY plane of the model matrix.
  private static final int COORDS_PER_VERTEX = 3;
  private static final int TEXCOORDS_PER_VERTEX = 2;
  private static final int FLOAT_SIZE = 4;
  private static final float[] QUAD_COORDS = new float[]{
          -1.0f, -1.0f, 0.0f,
          -1.0f, +1.0f, 0.0f,
          +1.0f, -1.0f, 0.0f,
          +1.0f, +1.0f, 0.0f,
  };

  // V is flipped relative to vertex order so the video appears upright.
  private static final float[] QUAD_TEXCOORDS = new float[]{
          0.0f, 1.0f,
          0.0f, 0.0f,
          1.0f, 1.0f,
          1.0f, 0.0f,
  };

  // Shader for a flat quad.
  private static final String VERTEX_SHADER =
      "uniform mat4 u_ModelViewProjection;\n\n" +
      "attribute vec4 a_Position;\n" +
      "attribute vec2 a_TexCoord;\n\n" +
      "varying vec2 v_TexCoord;\n\n" +
      "void main() {\n" +
      "   gl_Position = u_ModelViewProjection * vec4(a_Position.xyz, 1.0);\n" +
      "   v_TexCoord = a_TexCoord;\n" +
      "}";

  // The fragment shader samples the video texture, blending to transparent
  // for the green screen color (sampled from a screenshot of the video).
  // BUG FIX: GLSL ES 1.00 does not permit the 'f' suffix on float
  // literals; constants such as "23.0f/255.0f" and "0.4f" fail to compile
  // on strict drivers, so the suffixes have been removed.
  private static final String FRAGMENT_SHADER =
      "#extension GL_OES_EGL_image_external : require\n" +
      "\n" +
      "precision mediump float;\n" +
      "varying vec2 v_TexCoord;\n" +
      "uniform samplerExternalOES sTexture;\n" +
      "\n" +
      "void main() {\n" +
      "    //TODO make this a uniform variable - " +
      " but this is the color of the background. 17ad2b\n" +
      "  vec3 keying_color = vec3(23.0/255.0, 173.0/255.0, 43.0/255.0);\n" +
      "  float thresh = 0.4; // 0 - 1.732\n" +
      "  float slope = 0.2;\n" +
      "  vec3 input_color = texture2D(sTexture, v_TexCoord).rgb;\n" +
      "  float d = abs(length(abs(keying_color.rgb - input_color.rgb)));\n" +
      "  float edge0 = thresh * (1.0 - slope);\n" +
      "  float alpha = smoothstep(edge0,thresh,d);\n" +
      "  gl_FragColor = vec4(input_color, alpha);\n" +
      "}";

  // Geometry data in GLES friendly data structure.
  private FloatBuffer mQuadVertices;
  private FloatBuffer mQuadTexCoord;

  // Shader program id and parameters.
  private int mQuadProgram;
  private int mQuadPositionParam;
  private int mQuadTexCoordParam;
  private int mModelViewProjectionUniform;
  private int mTextureId = -1;

  // Matrix for the location and perspective of the quad.
  private float[] mModelMatrix = new float[16];

  // Media player, texture and other bookkeeping.
  private MediaPlayer player;
  private SurfaceTexture videoTexture;
  private boolean frameAvailable = false;  // guarded by synchronized(this)
  private boolean started = false;         // guarded by synchronized(this)
  // BUG FIX: done/prepared are written by MediaPlayer callbacks on the
  // main thread and read by draw() on the GL thread; they must be volatile
  // for the GL thread to reliably observe the writes.
  private volatile boolean done;
  private volatile boolean prepared;
  private static Handler handler;  // main-thread handler, created lazily

  // Lock used for waiting if the player was not yet created.
  private final Object lock = new Object();

  /**
   * Updates the model matrix used to position and scale the quad.
   *
   * @param modelMatrix anchor pose in world space (column major)
   * @param scaleFactor uniform scale applied to the 2x2 quad; e.g. 0.25
   *                    yields a quad 0.5 world units on a side
   */
  public void update(float[] modelMatrix, float scaleFactor) {
    float[] scaleMatrix = new float[16];
    Matrix.setIdentityM(scaleMatrix, 0);
    scaleMatrix[0] = scaleFactor;
    scaleMatrix[5] = scaleFactor;
    scaleMatrix[10] = scaleFactor;
    Matrix.multiplyMM(mModelMatrix, 0, modelMatrix, 0, scaleMatrix, 0);
  }

  /**
   * Initializes the GLES objects (OES texture, quad buffers, shader
   * program) and kicks off MediaPlayer creation on the main thread.
   * Must be called from the GL render thread so it has an EGLContext.
   */
  public void createOnGlThread() {

    // 1 external (OES) texture to hold the video frame.
    int textures[] = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    mTextureId = textures[0];
    int mTextureTarget = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
    GLES20.glBindTexture(mTextureTarget, mTextureId);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_S,
       GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_WRAP_T,
       GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MIN_FILTER,
       GLES20.GL_NEAREST);
    GLES20.glTexParameteri(mTextureTarget, GLES20.GL_TEXTURE_MAG_FILTER,
       GLES20.GL_NEAREST);

    videoTexture = new SurfaceTexture(mTextureId);
    videoTexture.setOnFrameAvailableListener(this);

    // Make a quad to hold the movie: vertex positions...
    ByteBuffer bbVertices = ByteBuffer.allocateDirect(
         QUAD_COORDS.length * FLOAT_SIZE);
    bbVertices.order(ByteOrder.nativeOrder());
    mQuadVertices = bbVertices.asFloatBuffer();
    mQuadVertices.put(QUAD_COORDS);
    mQuadVertices.position(0);

    // ...and texture coordinates.
    int numVertices = 4;
    ByteBuffer bbTexCoords = ByteBuffer.allocateDirect(
            numVertices * TEXCOORDS_PER_VERTEX * FLOAT_SIZE);
    bbTexCoords.order(ByteOrder.nativeOrder());
    mQuadTexCoord = bbTexCoords.asFloatBuffer();
    mQuadTexCoord.put(QUAD_TEXCOORDS);
    mQuadTexCoord.position(0);

    int vertexShader = loadGLShader(TAG, GLES20.GL_VERTEX_SHADER, VERTEX_SHADER);
    int fragmentShader = loadGLShader(TAG,
         GLES20.GL_FRAGMENT_SHADER, FRAGMENT_SHADER);

    mQuadProgram = GLES20.glCreateProgram();
    GLES20.glAttachShader(mQuadProgram, vertexShader);
    GLES20.glAttachShader(mQuadProgram, fragmentShader);
    GLES20.glLinkProgram(mQuadProgram);
    GLES20.glUseProgram(mQuadProgram);

    ShaderUtil.checkGLError(TAG, "Program creation");

    mQuadPositionParam = GLES20.glGetAttribLocation(mQuadProgram, "a_Position");
    mQuadTexCoordParam = GLES20.glGetAttribLocation(mQuadProgram, "a_TexCoord");
    mModelViewProjectionUniform = GLES20.glGetUniformLocation(
            mQuadProgram, "u_ModelViewProjection");

    ShaderUtil.checkGLError(TAG, "Program parameters");

    Matrix.setIdentityM(mModelMatrix, 0);

    initializeMediaPlayer();
  }

  /**
   * Draws the video quad.  No-op until the player is prepared, and after
   * playback completed or errored.
   *
   * @param pose              kept for interface compatibility but unused:
   *                          the quad's placement comes from the matrix
   *                          set in {@link #update}
   * @param cameraView        camera view matrix
   * @param cameraPerspective camera projection matrix
   */
  public void draw(Pose pose, float[] cameraView, float[] cameraPerspective) {
    if (done || !prepared) {
      return;
    }
    // Latch the most recent decoded frame into the OES texture.
    synchronized (this) {
      if (frameAvailable) {
        videoTexture.updateTexImage();
        frameAvailable = false;
      }
    }

    // NOTE: the original code also converted `pose` to a model matrix here
    // but never used it; that dead code has been removed.
    float[] modelView = new float[16];
    float[] modelViewProjection = new float[16];
    Matrix.multiplyMM(modelView, 0, cameraView, 0, mModelMatrix, 0);
    Matrix.multiplyMM(modelViewProjection, 0, cameraPerspective, 0, modelView, 0);

    ShaderUtil.checkGLError(TAG, "Before draw");

    // Alpha-blend so the keyed-out green screen is transparent.
    GLES20.glEnable(GL10.GL_BLEND);
    GLES20.glBlendFunc(GL10.GL_SRC_ALPHA, GL10.GL_ONE_MINUS_SRC_ALPHA);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureId);
    GLES20.glUseProgram(mQuadProgram);

    // Set the vertex positions.
    GLES20.glVertexAttribPointer(
            mQuadPositionParam, COORDS_PER_VERTEX, GLES20.GL_FLOAT,
            false, 0, mQuadVertices);
    // Set the texture coordinates.
    GLES20.glVertexAttribPointer(mQuadTexCoordParam, TEXCOORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false, 0, mQuadTexCoord);

    // Enable vertex arrays and upload the MVP matrix.
    GLES20.glEnableVertexAttribArray(mQuadPositionParam);
    GLES20.glEnableVertexAttribArray(mQuadTexCoordParam);
    GLES20.glUniformMatrix4fv(mModelViewProjectionUniform, 1, false,
                              modelViewProjection, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);

    // Disable vertex arrays so we don't disturb other renderers.
    GLES20.glDisableVertexAttribArray(mQuadPositionParam);
    GLES20.glDisableVertexAttribArray(mQuadTexCoordParam);

    ShaderUtil.checkGLError(TAG, "Draw");
  }

  /**
   * Creates the MediaPlayer on the main thread (its callbacks need a
   * looper thread).  play() blocks on {@link #lock} until this completes.
   */
  private void initializeMediaPlayer() {
    if (handler == null) {
      handler = new Handler(Looper.getMainLooper());
    }

    handler.post(new Runnable() {
      @Override
      public void run() {
        synchronized (lock) {
          player = new MediaPlayer();
          // BUG FIX: notifyAll rather than notify, so every thread blocked
          // in play() is woken, not an arbitrary single one.
          lock.notifyAll();
        }
      }
    });
  }

  @Override
  public void onFrameAvailable(SurfaceTexture surfaceTexture) {
    // Called on an arbitrary thread; draw() consumes the flag under the
    // same monitor.
    synchronized (this) {
      frameAvailable = true;
    }
  }

  /**
   * Starts asynchronous playback of a movie from the app's assets.
   *
   * @param filename asset-relative path of the movie file
   * @param context  context used to open the asset
   * @return true if preparation was started, false on error/interruption
   */
  public boolean play(final String filename, Context context)
                     throws FileNotFoundException {
    // Wait for the main thread to finish creating the player.
    if (player == null) {
      synchronized (lock) {
        while (player == null) {
          try {
            lock.wait();
          } catch (InterruptedException e) {
            // BUG FIX: re-assert the interrupt flag for callers up-stack
            // instead of silently swallowing it.
            Thread.currentThread().interrupt();
            return false;
          }
        }
      }
    }

    player.reset();
    done = false;
    // BUG FIX: reset `prepared` too, so draw() does not render from a
    // player that is being re-prepared for a new clip.
    prepared = false;

    player.setOnPreparedListener(new MediaPlayer.OnPreparedListener() {
      @Override
      public void onPrepared(MediaPlayer mp) {
        prepared = true;
        mp.start();
      }
    });
    player.setOnErrorListener(new MediaPlayer.OnErrorListener() {
      @Override
      public boolean onError(MediaPlayer mp, int what, int extra) {
        done = true;
        Log.e("VideoPlayer",
            String.format("Error occured: %d, %d\n", what, extra));
        return false;
      }
    });

    player.setOnCompletionListener(new MediaPlayer.OnCompletionListener() {
      @Override
      public void onCompletion(MediaPlayer mp) {
        done = true;
      }
    });

    player.setOnInfoListener(new MediaPlayer.OnInfoListener() {
      @Override
      public boolean onInfo(MediaPlayer mediaPlayer, int i, int i1) {
        return false;
      }
    });

    try {
      AssetManager assets = context.getAssets();
      AssetFileDescriptor descriptor = assets.openFd(filename);
      player.setDataSource(descriptor.getFileDescriptor(),
                           descriptor.getStartOffset(),
                           descriptor.getLength());
      // BUG FIX: close the descriptor once the data source is set, and
      // release the local Surface after handing it to the player —
      // MediaPlayer keeps its own references, and not releasing them
      // leaked a file descriptor and a native surface per play() call.
      descriptor.close();
      Surface surface = new Surface(videoTexture);
      player.setSurface(surface);
      surface.release();
      player.prepareAsync();
      synchronized (this) {
        started = true;
      }
    } catch (IOException e) {
      Log.e(TAG, "Exception preparing movie", e);
      return false;
    }

    return true;
  }

  /** Returns true once play() has successfully begun preparing a movie. */
  public synchronized boolean isStarted() {
    return started;
  }

  /**
   * Compiles a GLSL shader, throwing on failure.
   *
   * @param tag  log tag used for compile errors
   * @param type GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER
   * @param code GLSL source
   * @return the non-zero compiled shader id
   */
  static int loadGLShader(String tag, int type, String code) {
    int shader = GLES20.glCreateShader(type);
    GLES20.glShaderSource(shader, code);
    GLES20.glCompileShader(shader);

    // Get the compilation status.
    final int[] compileStatus = new int[1];
    GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compileStatus, 0);

    // If the compilation failed, delete the shader and fail loudly.
    // (Folded the original's two-step check into one branch.)
    if (compileStatus[0] == 0) {
      Log.e(tag, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shader));
      GLES20.glDeleteShader(shader);
      throw new RuntimeException("Error creating shader.");
    }

    return shader;
  }
}
本文内容由网友自发贡献,版权归原作者所有,本站不承担相应法律责任。如您发现有涉嫌抄袭侵权的内容,请联系:hwhale#tublm.com(使用前将#替换为@)

需要在ARCORE中播放视频 的相关文章

随机推荐

  • 用于匹配大括号的 IntelliJ IDEA 快捷键映射

    在 IntelliJ IDEA 中导航到匹配大括号的键盘映射是什么 public void blah If 是我的光标 我想用这个键盘映射跳到右大括号 我只用 IntelliJ 9 验证了这一点 但是 在 Windows 上 Ctrl wi
  • 将 GitHub 分叉到 AzureDevOps?

    这篇2014年的帖子 http www woodcp com 2014 01 how to fork git repositories on visual studio online 表示没有对从 GitHub 分叉到 AzureDevOp
  • matplotlib 中的 Latex 渲染错误

    使用 python 2 7 12 在 Ubuntu 16 04 上 和 matplotlib 1 5 2 以下代码渲染不正确 from matplotlib pyplot import plot 1 2 1 1 xlabel r beta
  • Python。如何优化搜索功能

    有什么办法可以优化这两个功能吗 第一个功能 def searchList list element for i in range 0 len list if list i element return True return False 第
  • Mockito - void 函数上的 thenCallRealMethod()

    我在尝试编写 JUnit 测试用例时遇到了问题 而且对 Mockito 还比较陌生 我有一个正在嘲笑的类的函数 该函数恰好是 void 返回类型 当从我的模拟类调用此函数时 我的理解 和调试经验 是它不会调用原始函数 为了克服这个问题 我尝
  • pythonanywhere - 如何使用 websockets 按照 web2py 消息传递示例传输消息?

    因此 我构建了一个应用程序来使用 web2py 和 pythonanywhere 测试 websockets 并且它可以在本地工作 但是当上传到 pythonanywhere 时它不起作用 我认为原因是我正在将内容发送到本地主机 127 0
  • Google Play 游戏功能徽章在 Google Play 上不可见

    我成功地将 Google Play 服务集成到我的 Android 应用程序中 并添加了成就和排行榜 但发布已经几天了 但我在 Google Play 上看不到它的徽章 我需要向 AndroidManifest xml 添加什么内容吗 这些
  • Java util zip 创建“损坏”的 zip 文件

    我正在压缩目录的内容 但在尝试打开压缩文件时遇到错误 谁能告诉我的代码发生了什么 也许我没有分配足够的字节 查看 zipDirectory 内部 您会看到我正在压缩包含特殊扩展文件的文件夹 不确定错误发生在哪里 所以也许有人可以帮助我 非常
  • 使用 MongoDB 进行 Elastic Beanstalk 部署

    非常感谢有关如何通过以下堆栈正确部署 Elastic Beanstalk 的资源建议 MongoDB 导轨 美洲狮 Sidekiq Redis 弹性搜索 我需要在 ebextension 文件中设置所有这些内容吗 或者是在 AWS 中手动设
  • 给定音频流,查找门何时关上(声压级计算?)

    与拍手探测器没什么不同 拍手 拍手鼓掌拍手吧 拍手鼓掌拍手 拍手 拍板 拍手鼓掌 我需要检测门何时关闭 这是在车辆中 这比房间或家庭门更容易 Listen http ubasics com so van driver door openin
  • 这不是尾递归风格的代码吗?

    我对 Scala 有点陌生 在阅读 David Pollack 的 Begining Scala 时尝试了它 他定义了一个简单的递归函数 从文件中加载所有字符串 def allStrings expr gt String List Stri
  • Android Volley,JsonObjectRequest 但接收 JsonArray

    所以我正在使用JsonObjectRequest发送一个JsonObject到休息电话 但它返回JsonArray而不是一个JsonObject 它给了我一个错误 说它无法解析来自JsonObjectRequest 但如果我使用JsonAr
  • Go指针第一次赋值

    当我在玩弄闭包时 我偶然发现了这种行为 我无法理解它 此代码片段的工作原理 func main var a string foo var b string var c string bar b c fmt Printf s s n a b
  • 比较数组不打印差异

    这是我的测试代码 a array Peter gt 35 Ben gt 37 Joe gt 21 b array Peter gt 35 Ben gt 21 Joe gt 43 function leo array diff a b map
  • DWT Java 脚本库进度对话框自动与 Angular 5 显示

    我将 Dynamic Web Twain javascript 库合并到我的 Angular 应用程序中 以允许我的最终用户使用 Web 浏览器而不是桌面应用程序进行扫描 加载页面后 立即弹出进度条 这不是预期的行为 当我使用 Angula
  • IE 9+ 下载属性解决方法

    我正在尝试从我的网络服务下载文件 我需要将复杂的元数据传递到服务器才能知道如何下载文件 以下是我如何在常青浏览器中实现这一目标 i use angular but not important for this demo http post
  • 再次仔细检查锁定和 C#

    最近 我一直在重构一些 C 代码 我发现发生了一些双重检查锁定实践 我当时并不知道这是一种不好的做法 我真的很想摆脱它 问题是我有一个类应该延迟初始化并被大量线程频繁访问 我也不想将初始化移至静态初始化程序 因为我计划使用弱引用来防止初始化
  • 使用 ITextSharp 将 HTML 文件转换为 PDF 文件

    我想完成以下任务 给定 html 文件的路径名和所需的 pdf 文件的路径名 使用 ITextSharp 将 HTML 文件转换为 PDF 我见过很多代码示例 它们的功能与此接近 但不完全是我需要的 我相信我的解决方案需要使用 iTextS
  • 使用 api v2 创建 Facebook 事件

    可以使用 Facebook API 创建事件吗 API v1 中可以实现 https developers facebook com docs graph api reference v1 0 event publish https dev
  • 需要在ARCORE中播放视频

    正如我们在 ARCore 中所知 我们可以通过单击水平面来检测 3D 对象 而不是 3d 对象 当用户要单击平面时 我需要显示视频 外观和感觉应该与显示的 3D 对象相同 视频应以预览模式显示 而不是 3D 对象 在 ARcore 中 他们