Update references based on merge

Simon Leistikow 2017-12-19 16:28:43 +01:00
parent 055a39cd50
commit fab5bffb7e
4 changed files with 84 additions and 38 deletions


@@ -85,7 +85,7 @@ public class MainActivity extends AppCompatActivity {
fab.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
PhotoSphereSurfaceView.USE_TOUCH = !PhotoSphereSurfaceView.USE_TOUCH;
sphereFragment.toggleUseTouchInput();
displayUI(false);
}
});

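Note on the change above: the old listener flipped the process-global PhotoSphereSurfaceView.USE_TOUCH flag, while the new one routes the toggle through the fragment, so the input mode becomes per-instance state of the view (see the SphereFragment and PhotoSphereSurfaceView hunks below). The two-line contrast, both lines taken from this diff:

// Before: one static flag shared by every PhotoSphereSurfaceView instance.
PhotoSphereSurfaceView.USE_TOUCH = !PhotoSphereSurfaceView.USE_TOUCH;
// After: the flag lives on the view and is reached through the fragment.
sphereFragment.toggleUseTouchInput();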

@@ -75,4 +75,8 @@ public class SphereFragment extends ImageFragment implements View.OnTouchListener
}
surfaceView.setBitmap(bitmap);
}
public void toggleUseTouchInput() {
surfaceView.setUseTouchInput(!surfaceView.getUseTouchInput());
}
}


@@ -94,6 +94,11 @@ public class PhotoSphereParser {
throwIfUnexpectedEOF(i, r.length);
int exifLen = integer(r);
// Check for broken or unavailable header
if (exifLen <= 1) {
return null;
}
// Skip EXIF header
r = new byte[exifLen - 2];
i = inputStream.read(r);

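The new guard in PhotoSphereParser returns null when the EXIF length field indicates a broken or unavailable header (exifLen <= 1); the two lines that follow then skip exifLen - 2 bytes, which matches the JPEG convention that a segment's length field includes its own two bytes. One caveat: InputStream.read(byte[]) may return fewer bytes than requested, so a strictly robust skip loops until the buffer is full. A minimal sketch under that assumption (the helper name readFully is illustrative, not part of the parser):

import java.io.IOException;
import java.io.InputStream;

// Reads exactly buf.length bytes, or returns the number read before the stream ended.
// The caller can then apply the parser's own throwIfUnexpectedEOF-style check.
static int readFully(InputStream inputStream, byte[] buf) throws IOException {
    int off = 0;
    while (off < buf.length) {
        int n = inputStream.read(buf, off, buf.length - off);
        if (n < 0) {
            break; // premature end of stream
        }
        off += n;
    }
    return off;
}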

@@ -16,15 +16,22 @@ import android.view.MotionEvent;
*/
public class PhotoSphereSurfaceView extends GLSurfaceView implements SensorEventListener {
public static boolean USE_TOUCH = false; // TODO: determine dynamically
// Determines whether touch input or device rotation input is used.
private boolean useTouchInput = false;
// The actual rotation matrix determined by user input.
private final float rotationMatrix [] = new float[16];
// The temporary rotation delta matrix.
private final float tempMatrix [] = new float[16];
// These vectors are used for ray determination.
private final float rayStart [] = new float[4];
private final float rayDirection [] = new float[4];
// Angles recorded on ACTION_DOWN, used by the (currently disabled) delta-rotation code below.
float oldAngleXZ, oldAngleY;
// The renderer used by this view.
private PhotoSphereRenderer renderer;
@@ -55,47 +62,61 @@ public class PhotoSphereSurfaceView extends GLSurfaceView implements SensorEventListener
@Override
public boolean onTouchEvent(MotionEvent event) {
if(!USE_TOUCH)
if(!useTouchInput)
return true;
switch (event.getAction()) {
case MotionEvent.ACTION_MOVE:
// Retrieve ray in world space.
renderer.getRay(event.getX(), event.getY(), rayStart, rayDirection);
// Retrieve ray in world space.
renderer.getRay(event.getX(), event.getY(), rayStart, rayDirection);
// Solve the quadratic equation.
float a = 0.0f, b = 0.0f, c = 0.0f;
for(int i=0; i<3; i++) {
a += rayDirection[i] * rayDirection[i];
b += rayDirection[i] * 2.0f * (rayStart[i]); // Sphere center at origin.
c += rayStart[i]*rayStart[i];
}
c -= PhotoSphereRenderer.SPHERE_RADIUS*PhotoSphereRenderer.SPHERE_RADIUS;
float D = b*b-4.0f*a*c;
// Solve the quadratic equation.
float a = 0.0f, b = 0.0f, c = 0.0f;
for(int i=0; i<3; i++) {
a += rayDirection[i] * rayDirection[i];
b += rayDirection[i] * 2.0f * (rayStart[i]); // Sphere center at origin.
c += rayStart[i]*rayStart[i];
}
c -= PhotoSphereRenderer.SPHERE_RADIUS*PhotoSphereRenderer.SPHERE_RADIUS;
float D = b*b-4.0f*a*c;
// Since the camera sits inside the sphere, the ray must intersect it.
if(D < 0) {
throw new RuntimeException("Ray must intersect with sphere, check camera position");
}
D = (float) Math.sqrt(D);
// Calculate intersection point p.
float t = -0.5f*(b+D)/a;
float px = rayStart[0] + t*rayDirection[0];
float py = rayStart[1] + t*rayDirection[1];
float pz = rayStart[2] + t*rayDirection[2];
// Calculate angles.
//float angleX = (float) Math.toDegrees(Math.atan2(py, px));
//float angleY = (float) Math.toDegrees(Math.acos(pz/Matrix.length(px, py, pz)));
synchronized (rotationMatrix) {
Matrix.setLookAtM(rotationMatrix, 0, 0.0f, 0.0f, 0.0f, px, py, pz, 1.0f, 0.0f, 0.0f);
}
// Since the camera sits inside the sphere, the ray must intersect it.
if(D < 0) {
throw new RuntimeException("Ray must intersect with sphere, check camera position");
}
D = (float) Math.sqrt(D);
// Calculate intersection point p.
float t = -0.5f*(b+D)/a;
float px = rayStart[0] + t*rayDirection[0];
float py = rayStart[1] + t*rayDirection[1];
float pz = rayStart[2] + t*rayDirection[2];
synchronized (rotationMatrix) {
Matrix.translateM(rotationMatrix, 0, px, py, pz);
}
/*
// Calculate angles.
float angleY = (float) Math.toDegrees(Math.atan2(pz, px));
float angleXZ = (float) Math.toDegrees(Math.acos(py));
switch (event.getAction()) {
case MotionEvent.ACTION_DOWN:
oldAngleY = angleY;
oldAngleXZ = angleXZ;
System.arraycopy(getRotationMatrix(), 0, tempMatrix, 0, 16);
break;
case MotionEvent.ACTION_MOVE:
synchronized (rotationMatrix) {
System.arraycopy(tempMatrix, 0, rotationMatrix, 0, 16);
//Matrix.rotateM(rotationMatrix, 0, oldAngleY-angleY, 0.0f, 1.0f, 0.0f);
Matrix.rotateM(rotationMatrix, 0, oldAngleXZ-angleXZ, 1.0f, 0.0f, 0.0f);
//Matrix.setLookAtM(rotationMatrix, 0, 0.0f, 0.0f, 0.0f, px, py, pz, 1.0f, 0.0f, 0.0f);
}
break;
}
*/
return true;
}
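For reference, the loop above computes the standard ray–sphere intersection coefficients. Writing the ray start as \(\mathbf{o}\) (rayStart), the direction as \(\mathbf{d}\) (rayDirection) and the sphere radius as \(R\) (PhotoSphereRenderer.SPHERE_RADIUS, sphere centred at the origin), inserting \(\mathbf{o} + t\,\mathbf{d}\) into the sphere equation gives

\[ \lVert \mathbf{o} + t\,\mathbf{d} \rVert^2 = R^2 \;\Longrightarrow\; a t^2 + b t + c = 0, \qquad a = \mathbf{d}\cdot\mathbf{d}, \quad b = 2\,\mathbf{o}\cdot\mathbf{d}, \quad c = \mathbf{o}\cdot\mathbf{o} - R^2, \]
\[ D = b^2 - 4ac, \qquad t = \frac{-b - \sqrt{D}}{2a}, \]

which is exactly the root the code selects with t = -0.5f*(b+D)/a once D has been overwritten by its square root. Because the viewer sits inside the sphere, \(c < 0\) and therefore \(D > 0\), so the RuntimeException only triggers if the camera is misplaced.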
@@ -107,7 +128,7 @@ public class PhotoSphereSurfaceView extends GLSurfaceView implements SensorEventListener
@Override
public void onSensorChanged(SensorEvent event) {
if(USE_TOUCH)
if(useTouchInput)
return;
synchronized (rotationMatrix) {
@@ -133,4 +154,20 @@ public class PhotoSphereSurfaceView extends GLSurfaceView implements SensorEventListener
public void setBitmap(Bitmap bitmap) {
renderer.requestBitmapUpload(bitmap);
}
/**
* Sets the input method used for the transformation calculation.
* @param useTouchInput true if touch input should be used, false for device rotation input
*/
public void setUseTouchInput(boolean useTouchInput) {
this.useTouchInput = useTouchInput;
}
/**
* Returns whether touch input is used for the transformation calculation.
* @return true if touch input is used, false otherwise
*/
public boolean getUseTouchInput() {
return useTouchInput;
}
}
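The block commented out in onTouchEvent converts the intersection point into a pair of angles (angleY around the vertical axis, angleXZ as inclination) and applies the delta relative to the angles stored on ACTION_DOWN. A hedged sketch of that conversion, assuming points on a sphere of radius SPHERE_RADIUS (the helper name is illustrative; note the disabled code calls Math.acos(py) directly, which is only valid for a unit sphere):

// Hypothetical helper mirroring the atan2/acos pattern in the disabled block above.
static float[] toSphereAngles(float px, float py, float pz) {
    float len = android.opengl.Matrix.length(px, py, pz); // equals the sphere radius for points on the sphere
    float angleY  = (float) Math.toDegrees(Math.atan2(pz, px));  // azimuth around the Y axis
    float angleXZ = (float) Math.toDegrees(Math.acos(py / len)); // inclination measured from the +Y axis
    return new float[] { angleY, angleXZ };
}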