motion tracking now plugged in - still needs work

main
mwinter 1 year ago
parent 0eb6de18b1
commit edb4016b25

@@ -36,8 +36,8 @@ bool ySafeMode = false;
long yLimitPos = 0;
int microsteps = 1;
-float maxSpeedConstant = 350 * microsteps;
-float accelerationConstant = 100 * microsteps;
+float maxSpeedConstant = 100 * microsteps;
+float accelerationConstant = 50 * microsteps;
void setup() {

File diff suppressed because it is too large

@@ -8,6 +8,7 @@ from picamera2 import MappedArray, Picamera2, Preview
from picamera2.previews.qt import QGlPicamera2
import numpy as np
from pythonosc import udp_client
+from libcamera import Transform
def rectFromPoint(center, len, width, axis):
@@ -85,9 +86,10 @@ def track(frame, roi, dKernel):
replaceCrop(frame, roiFineOuter, thresh)
meanCourse = pow(cropCoarse.mean(), 1)
#print(meanCourse)
mean = 0
direction = 1
-if(meanCourse > 10):
+if(meanCourse > 20):
# this could potentially be made more efficient by cropping from cropFineOuter
cropFineInner = crop(frame, roiFineInner)
# this could potentially be made more efficient by cropping from cropFineInner
@@ -125,9 +127,9 @@ def picameraToCVTrack():
#frame = cv2.cvtColor(frame, cv2.COLOR_GRAY2RGB)
distanceX = track(frame, roiX, dilationKernel)
-distanceY = track(frame, roiY, dilationKernel)
+distanceY = track(frame, roiY, dilationKernel) * -1
-oscClient.send_message("/distanceX", distanceX)
+oscClient.send_message("/trackerpos", [distanceX, distanceY])
drawRoi(frame, roiX)
drawRoi(frame, roiY)
@@ -205,11 +207,12 @@ class MainWindow(QGlPicamera2):
picam2 = Picamera2()
#picam2.start_preview(Preview.QTGL)
#max resolution is (4056, 3040) which is more like 10 fps
-config = picam2.create_preview_configuration(main={"size": (2028, 1520)}, lores={"size": (768, 768), "format": "YUV420"})
+#config = picam2.create_preview_configuration(main={"size": (2028, 1520)}, lores={"size": (768, 768), "format": "YUV420"}, transform=Transform(vflip=False))
+config = picam2.create_preview_configuration(main={"size": (4056, 3040)}, lores={"size": (768, 768), "format": "YUV420"}, transform=Transform(vflip=False))
picam2.configure(config)
app = QApplication([])
-qpicamera2 = MainWindow(picam2, width=1350, height=1350, keep_ar=False)
+qpicamera2 = MainWindow(picam2, width=2000, height=2000, keep_ar=False)
qpicamera2.setWindowTitle("Qt Picamera2 App")
selectedAxis = 'x'
@@ -218,8 +221,8 @@ l1 = 100
l2 = 300
l3 = 40
w = 20
-roiXCenter = (507, 98)
-roiYCenter = (306, 275)
+roiXCenter = (384, 20)
+roiYCenter = (20, 384)
roiX = rectsFromPoint(roiXCenter, l1, l2, l3, w, 'x')
roiY = rectsFromPoint(roiYCenter, l1, l2, l3, w, 'y')
dilationVal = 75
@@ -233,13 +236,22 @@ cv2.namedWindow("Frame", cv2.WINDOW_NORMAL)
cv2.resizeWindow("Frame", 1350, 1350)
cv2.setMouseCallback("Frame", moveROI)
-picam2.controls.ScalerCrop = (800, 0, 3040, 3040)
-picam2.controls.Brightness = 0.05
-picam2.controls.Contrast = 0.9
+#picam2.controls.ScalerCrop = (800, 0, 3040, 3040)
+picam2.controls.ScalerCrop = (1375, 550, 1800, 1800)
+picam2.controls.ScalerCrop = (1100, 400, 2125, 2125)
+#picam2.controls.Brightness = 0.2
+picam2.controls.Contrast = 1.1
+#picam2.set_controls({"ExposureValue": 2})
+#picam2.set_controls({"ColourGains": (0, 0)})
+#picam2.set_controls({"Sharpness": 10})
+#picam2.set_controls({"AeEnable": False})
+#picam2.set_controls({"ExposureTime": 1700})
+picam2.set_controls({"Saturation": 0})
(w0, h0) = picam2.stream_configuration("main")["size"]
(w1, h1) = picam2.stream_configuration("lores")["size"]
s1 = picam2.stream_configuration("lores")["stride"]
+print(h1)
picam2.start()
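
Taken together, this hunk leaves the camera script with a full-resolution main stream, a small YUV420 lores stream for the tracker, a tighter ScalerCrop, and a single combined /trackerpos OSC message (Y negated to match the motor axis). Below is a minimal, self-contained sketch of that configuration for reference; it is not part of the commit, and the OSC port (57120, SuperCollider's default) and the measure_offsets() stub are assumptions rather than values taken from this diff.

# Illustrative sketch, not part of the commit: dual-stream picamera2 setup
# feeding tracker offsets to SuperCollider over OSC.
from picamera2 import Picamera2
from libcamera import Transform
from pythonosc import udp_client

picam2 = Picamera2()
# full-resolution main stream for display, small YUV420 lores stream for tracking
config = picam2.create_preview_configuration(
    main={"size": (4056, 3040)},
    lores={"size": (768, 768), "format": "YUV420"},
    transform=Transform(vflip=False))
picam2.configure(config)

picam2.controls.ScalerCrop = (1100, 400, 2125, 2125)  # sensor crop around the work area
picam2.controls.Contrast = 1.1
picam2.set_controls({"Saturation": 0})  # greyscale output simplifies thresholding
picam2.start()

osc = udp_client.SimpleUDPClient("127.0.0.1", 57120)  # port is an assumption

def measure_offsets(gray):
    # stand-in for the script's track() routine
    return 0.0, 0.0

while True:
    yuv = picam2.capture_array("lores")  # planar YUV420; the first 768 rows are the Y plane
    gray = yuv[:768, :768]
    dx, dy = measure_offsets(gray)
    osc.send_message("/trackerpos", [dx, -dy])  # one combined message, Y negated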

@@ -13,7 +13,7 @@ automation = false;
imgPositions = 9.collect({nil});
curPos = Point.new(0, 0);
tarPos = Point.new(0, 0);
-netAddress = NetAddr.new("127.0.0.1", 7777);
+netAddress = NetAddr.new("127.0.0.1", 8080);
~serialPort = SerialPort("/dev/ttyACM0", baudrate: 115200, crtscts: true);
// recieve motor feedback
@@ -27,7 +27,7 @@ netAddress = NetAddr.new("127.0.0.1", 7777);
byte = ~serialPort.read;
if(byte==13, {
if(str[1].asString == "[", {
-valArray = str.asString.interpret; //.postln;
+valArray = str.asString.interpret;//.postln;
curPos = Point.new(valArray[0], valArray[1]);
limitSwitchNeg = valArray[2];
limitSwitchPos = valArray[3];
@@ -68,6 +68,8 @@ netAddress = NetAddr.new("127.0.0.1", 7777);
// send new coordinates to the arduino / motors
moveTo = {arg point;
"move to".postln;
point.postln;
~serialPort.putAll(point.x.asInteger.asString ++ " " ++ point.y.asInteger.asString);
~serialPort.put(10);
};
@@ -77,12 +79,14 @@ jogControl = {arg axis;
jog = Task({
loop{
count = (count + 0.01).clip(0, 1);
+count.postln;
jogRate = pow(count, 2) * 500;
if(axis == '/jog_horizontal', {
tarPos.x = curPos.x + (jogRate * jogDirection);
}, {
tarPos.y = curPos.y + (jogRate * jogDirection);
});
+curPos.postln;
moveTo.value(tarPos);
0.1.wait
};
@@ -90,10 +94,18 @@ jogControl = {arg axis;
OSCFunc({arg msg;
//tarPos.x = curPos.x + (1000 * msg[1]);
//moveTo.value(tarPos);
+msg.postln;
if(msg[1] == 0, {count = 0; jogRate = 0; jog.pause()}, {jogDirection = msg[1]; jog.play(AppClock)});
automation = false;
netAddress.sendMsg("/STATE/SET", "{automate: 0}");
}, axis, netAddress)
//netAddress.sendMsg("/STATE/SET", "{automate: 0}");
}, axis, netAddress);
OSCFunc({arg msg;
//tarPos.x = curPos.x + (1000 * msg[1]);
//moveTo.value(tarPos);
if(msg[1] == 0, {count = 0; jogRate = 0; jog.pause()}, {jogRate = 10; jogDirection = msg[1]; jog.play(AppClock)});
automation = false;
//netAddress.sendMsg("/STATE/SET", "{automate: 0}");
}, axis, NetAddr.localAddr)
};
jogHorizontal = jogControl.value('/jog_horizontal');
@@ -105,14 +117,15 @@ imgSelect = {
var imgIndex;
if(msg[1] > 0, {
imgIndex = msg[1] - 1;
+imgIndex.postln;
if(imgPositions[imgIndex] != nil, {tarPos = imgPositions[imgIndex].deepCopy; moveTo.value(tarPos)});
9.do({arg i; if(imgIndex != i, {
netAddress.sendMsg("/STATE/SET", "{img_" ++ (i + 1).asString ++ "_select: " ++ (i + 1).neg ++ "}")})});
automation = false;
-netAddress.sendMsg("/STATE/SET", "{automate: 0}");
+//netAddress.sendMsg("/STATE/SET", "{automate: 0}");
lastSelect = imgIndex;
}, {
-lastSelect = 0;
+//lastSelect = 0;
/*
imgIndex = msg[1].neg - 1;
if(imgIndex == lastSelect, {
@@ -128,6 +141,7 @@ imgCalibrate = {
calibrateHold = Routine({
20.do({0.1.wait});
imgPositions[imgIndex] = setPos.deepCopy;
+imgPositions.postln;
netAddress.sendMsg("/STATE/SET", "{message: \"image calibrated\"}");
});
@@ -143,6 +157,7 @@ imgCalibrate = {
}.value;
automate = OSCFunc({arg msg;
+msg.postln;
if(msg[1] == 1, {
automation = true;
}, {
@@ -154,7 +169,24 @@ automate = OSCFunc({arg msg;
}, '/automate', netAddress);
trackerPos = OSCFunc({arg msg;
msg.postln;
+//imgPositions.postln[lastSelect.postln];
+if(lastSelect != nil, {
+var distX, distY, isFocusing;
+distX = curPos.x - imgPositions[lastSelect].x;
+distY = curPos.y - imgPositions[lastSelect].y;
+isFocusing = false;
+if((imgPositions[lastSelect] != nil) && (distX.abs < 500) && (msg[1].abs > 6000), {
+tarPos.x = curPos.x + (10 * msg[1].sign * -1);
+isFocusing = true;
+//NetAddr.localAddr.sendMsg('/jog_horizontal', msg[1].sign.postln * -1);
+});
+if((imgPositions[lastSelect] != nil) && (distY.abs < 500) && (msg[2].abs > 6000), {
+tarPos.y = curPos.y + (10 * msg[2].sign * -1);
+isFocusing = true;
+});
+if(isFocusing, {moveTo.value(tarPos)});
+})
//msg.postln;
}, '/trackerpos');
)
~serialPort.close
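
The moveTo and serial-read blocks above imply a simple line-based protocol: target positions go to the Arduino as "<x> <y>" terminated by a newline (the put(10)), and the Arduino reports back a bracketed array "[x, y, limitNeg, limitPos]" terminated by a carriage return (byte 13). For illustration only, here is a rough sketch of that exchange in Python with pyserial; pyserial is not part of this project (the port is driven from SuperCollider), and the exact feedback format is inferred from the parsing code above.

# Illustrative sketch, not part of the commit: the serial protocol implied by the
# SuperCollider code, written with pyserial for clarity.
import ast
import serial

ser = serial.Serial("/dev/ttyACM0", 115200, rtscts=True)

def move_to(x, y):
    # mirrors moveTo: integer target coordinates, newline-terminated
    ser.write(f"{int(x)} {int(y)}\n".encode())

def read_feedback():
    # mirrors the feedback reader: accumulate until CR, then interpret the "[...]" array
    line = ser.read_until(b"\r").decode(errors="ignore")
    if "[" in line:
        x, y, limit_neg, limit_pos = ast.literal_eval(line[line.index("["):].strip())
        return (x, y), limit_neg, limit_pos
    return None

move_to(100, 200)
print(read_feedback())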
