I'm using a Raspberry Pi camera module, capturing data with the picamera library via the following code:
# Start the camera, feed its MJPEG stream into a StreamingOutput, and serve
# captured frames over HTTP on port 8000 until interrupted.
with picamera.PiCamera(
    sensor_mode=4,
    resolution='1640x1232',
    framerate=30
    ) as camera:
    camera.rotation = 180
    output = StreamingOutput()
    # Give the handler class access to the shared output object: do_GET
    # reads self._output, which is otherwise never assigned anywhere.
    StreamingHandler._output = output
    camera.start_recording(output, format='mjpeg')
    # BUG FIX: the try/finally must live *inside* the `with` block.  As
    # originally dedented, the `with` exits (closing the camera and
    # stopping the recording) before the server starts, and the finally
    # clause then calls stop_recording() on an already-closed camera.
    try:
        server = StreamingServer(('', 8000), StreamingHandler)
        server.serve_forever()
    finally:
        camera.stop_recording()
class StreamingOutput:
    """File-like sink for the camera's MJPEG stream.

    picamera calls ``write()`` with chunks of encoded data.  Each JPEG
    frame begins with the SOI marker (``0xFFD8``); when a chunk starting
    with that marker arrives, everything accumulated so far is published
    atomically as ``self.frame`` and all waiting clients are notified.
    """

    def __init__(self):
        self.frame = None                       # latest complete JPEG frame (bytes)
        self.condition = threading.Condition()  # guards frame; wakes waiting readers
        self._buffer = io.BytesIO()             # accumulates the in-progress frame

    def write(self, buf):
        """Accept one chunk of MJPEG data; publish a frame on each SOI marker."""
        if buf.startswith(b'\xff\xd8'):
            # A new frame is starting, so the buffer holds the previous
            # complete frame.  Truncate at the current position to drop
            # stale bytes left over from an earlier, longer frame, then
            # publish and notify all waiting clients.
            self._buffer.truncate()
            with self.condition:
                self.frame = self._buffer.getvalue()
                self.condition.notify_all()
            self._buffer.seek(0)
        # BUG FIX: the incoming chunk was never written into the buffer,
        # so self.frame would always come out empty.  Append it and return
        # the byte count, as the file-like write() protocol expects.
        return self._buffer.write(buf)
class StreamingServer(socketserver.ThreadingMixIn, server.HTTPServer):
    """HTTP server that handles every request in its own daemon thread."""

    # Daemon threads so in-flight requests never block interpreter exit.
    daemon_threads = True
    # Permit immediate restarts on the same port (skip TIME_WAIT).
    allow_reuse_address = True
class StreamingHandler(server.BaseHTTPRequestHandler):
    """Serves the most recent camera frame as a single JPEG.

    NOTE(review): relies on ``self._output`` being a StreamingOutput-like
    object (with ``condition`` and ``frame``); SOURCE never shows where
    ``_output`` is assigned — presumably set as a class attribute by the
    startup code.  Confirm.
    """

    def do_GET(self):
        if self.path != '/capture.jpg':
            # BUG FIX: previously any other path received no response at
            # all — the connection just closed without a status line.
            self.send_error(404)
            return
        # Block until the next complete frame is published, then take a
        # private reference to it while still holding the lock.
        with self._output.condition:
            self._output.condition.wait()
            frame = self._output.frame
        # Debug aid: the same bytes saved to disk form a valid JPEG.
        with open("frame.jpg", 'wb') as f:
            f.write(frame)
        self.send_response(200)
        self.send_header('Content-Type', 'image/jpeg')
        # Header values are conventionally strings; len() yields an int.
        self.send_header('Content-Length', str(len(frame)))
        self.end_headers()
        self.wfile.write(frame)
It's the damnedest thing: although the image saves to disk just fine (frame.jpg is completely valid), the same bytes produce a truncated image when served over the HTTP server. Here's a screenshot:
I've tried a number of different things, and I'm at a dead end. Any ideas?
