javac#androidaccessibilityservicemouse-coordinates

How can I perform a drag (based on X,Y mouse coordinates) on Android using an AccessibilityService?


I want to know how to perform a drag on Android based on X, Y mouse coordinates. As two simple examples, consider TeamViewer/QuickSupport drawing the "password pattern" on a remote smartphone, and the pen of Windows Paint, respectively.

enter image description here

enter image description here

All that I'm able to do is simulate a touch (with dispatchGesture() and also AccessibilityNodeInfo.ACTION_CLICK).

I found these relevant links, but I don't know if they can be useful:

Below is my working code that is used to send mouse coordinates (inside of PictureBox control) to remote phone and simulate touch.

Windows Forms Application:

// Sends a "TOUCH" command, scaled to the remote screen resolution, to every
// selected client when the left mouse button is pressed inside pictureBox1.
private void pictureBox1_MouseDown(object sender, MouseEventArgs e)
{
    foreach (ListViewItem item in lvConnections.SelectedItems)
    {
        // Remote screen resolution, e.g. "1080x1920" -> ["1080", "1920"].
        string[] tokens = item.SubItems[5].Text.Split('x'); // Ex: 1080x1920

        // Scale the PictureBox-local position to remote-screen pixels.
        // tokens[i] is already a string — the redundant .ToString() was removed.
        // Integer division intentionally truncates to whole pixels.
        int xClick = (e.X * int.Parse(tokens[0])) / (pictureBox1.Size.Width);
        int yClick = (e.Y * int.Parse(tokens[1])) / (pictureBox1.Size.Height);

        Client client = (Client)item.Tag;

        if (e.Button == MouseButtons.Left)
            client.sock.Send(Encoding.UTF8.GetBytes("TOUCH" + xClick + "<|>" + yClick + Environment.NewLine));
    }
}

Edit:

My last attempt was a "swipe screen" using mouse coordinates (C# Windows Forms application) and a custom Android routine (with reference to the "swipe screen" code linked above), respectively:

// Start position of the current drag, in remote-screen coordinates.
private Point mdownPoint = new Point();

// On left-button press: record the drag start point and send a "TOUCH"
// command scaled to the remote screen resolution.
private void pictureBox1_MouseDown(object sender, MouseEventArgs e)
{
    foreach (ListViewItem item in lvConnections.SelectedItems)
    {
        // Remote screen resolution
        string[] tokens = item.SubItems[5].Text.Split('x'); // Ex: 1080x1920

        Client client = (Client)item.Tag;

        if (e.Button == MouseButtons.Left)
        {
            // BUG FIX: xClick/yClick were assigned without ever being
            // declared in the snippet — declare them as locals here.
            int xClick = (e.X * int.Parse(tokens[0])) / (pictureBox1.Size.Width);
            int yClick = (e.Y * int.Parse(tokens[1])) / (pictureBox1.Size.Height);

            // Saving start position for the swipes sent from MouseMove:
            mdownPoint.X = xClick;
            mdownPoint.Y = yClick;

            client.sock.Send(Encoding.UTF8.GetBytes("TOUCH" + xClick + "<|>" + yClick + Environment.NewLine));
        }
    }
}

// While the left button is held, sends a swipe command from the saved
// start point (mdownPoint) to the current scaled mouse position.
private void PictureBox1_MouseMove(object sender, MouseEventArgs e)
{
    foreach (ListViewItem item in lvConnections.SelectedItems)
    {
        // Remote screen resolution
        string[] tokens = item.SubItems[5].Text.Split('x'); // Ex: 1080x1920

        Client client = (Client)item.Tag;

        if (e.Button == MouseButtons.Left)
        {
            // BUG FIX: declare xClick/yClick locally — the original snippet
            // assigned them without any declaration.
            int xClick = (e.X * int.Parse(tokens[0])) / (pictureBox1.Size.Width);
            int yClick = (e.Y * int.Parse(tokens[1])) / (pictureBox1.Size.Height);

            client.sock.Send(Encoding.UTF8.GetBytes("MOUSESWIPESCREEN" + mdownPoint.X + "<|>" + mdownPoint.Y + "<|>" + xClick + "<|>" + yClick + Environment.NewLine));
        }
    }
}

android AccessibilityService:

/**
 * Dispatches a straight-line swipe gesture from (x1, y1) to (x2, y2).
 *
 * @param x1   start X in screen pixels
 * @param y1   start Y in screen pixels
 * @param x2   end X in screen pixels
 * @param y2   end Y in screen pixels
 * @param time stroke duration in milliseconds
 */
public void Swipe(int x1, int y1, int x2, int y2, int time) {
    // dispatchGesture() requires API 24 (Android N) or newer.
    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.N) {
        System.out.println(" ======= Swipe =======");

        // Single straight stroke from start to end, beginning 100 ms after dispatch.
        Path swipePath = new Path();
        swipePath.moveTo(x1, y1);
        swipePath.lineTo(x2, y2);

        GestureDescription.Builder builder = new GestureDescription.Builder();
        builder.addStroke(new GestureDescription.StrokeDescription(swipePath, 100, time));

        dispatchGesture(builder.build(), new GestureResultCallback() {
            @Override
            public void onCompleted(GestureDescription gestureDescription) {
                System.out.println("SWIPE Gesture Completed :D");
                super.onCompleted(gestureDescription);
            }
        }, null);
    }
}

that produces the following result (but still isn't able to draw a "password pattern" like TeamViewer, for example). But as said in a comment below, I think a similar approach can probably achieve this using continued gestures. Any suggestions in this direction will be welcome.

enter image description here

enter image description here


Edit 2:

Definitely, the solution is continued gestures like said on previous Edit.

And below is supposedly fixed code that I found here =>

android AccessibilityService:

// Simulates an L-shaped drag path: 200 pixels right, then 200 pixels down.
// First stroke: move right from (200,200) to (400,200).
Path path = new Path();
path.moveTo(200,200);
path.lineTo(400,200);

// willContinue = true: the finger stays down when this stroke ends,
// so a continuation stroke can be attached afterwards.
final GestureDescription.StrokeDescription sd = new GestureDescription.StrokeDescription(path, 0, 500, true);

// The starting point of the second path must match
// the ending point of the first path.
Path path2 = new Path();
path2.moveTo(400,200);
path2.lineTo(400,400);

// continueStroke() chains the second stroke onto the first;
// willContinue = false here, so the gesture ends (finger lifts).
final GestureDescription.StrokeDescription sd2 = sd.continueStroke(path2, 0, 500, false); // 0.5 second

// Dispatch the first stroke; the continuation is dispatched only from
// onCompleted(), i.e. after the first stroke has finished.
HongBaoService.mService.dispatchGesture(new GestureDescription.Builder().addStroke(sd).build(), new AccessibilityService.GestureResultCallback(){

@Override
public void onCompleted(GestureDescription gestureDescription){
super.onCompleted(gestureDescription);
// Second half of the L-shape: dispatched without a callback.
HongBaoService.mService.dispatchGesture(new GestureDescription.Builder().addStroke(sd2).build(),null,null);
}

@Override
public void onCancelled(GestureDescription gestureDescription){
super.onCancelled(gestureDescription);
}
},null);

Then, my question is: how do I correctly send mouse coordinates to the code above, in a way that can perform a drag in any direction? Any ideas?


Edit 3:

I found two routines that are used to perform a drag, but they use UiAutomation + injectInputEvent(). AFAIK, event injection only works in a system app, as said here and here, and I don't want that.

These are the routines found:

Then, to achieve my goal, I think the 2nd routine is more appropriate (following its logic, excluding event injection) together with the code shown in Edit 2: send all points from pictureBox1_MouseDown and pictureBox1_MouseMove (C# Windows Forms application) to fill Point[] dynamically, and on pictureBox1_MouseUp send a command to execute the routine using the filled array. If you have an idea for the 1st routine, let me know :D.

If, after reading this Edit, you have a possible solution, please show me in an answer, while I try and test this idea.


Solution

  • Here is a example of a solution based on Edit 3 of question.


    C# Windows Forms Application "formMain.cs":

    using System.Net.Sockets;
    
    private List<Point> lstPoints;
    
    // Begins recording a drag path: on left-button press, start a fresh
    // point list seeded with the press position.
    private void pictureBox1_MouseDown(object sender, MouseEventArgs e) 
    {
        if (e.Button != MouseButtons.Left)
            return;

        lstPoints = new List<Point>();
        lstPoints.Add(new Point(e.X, e.Y));
    }
    
    // Appends the current position to the drag path while the left
    // button is held down.
    private void PictureBox1_MouseMove(object sender, MouseEventArgs e)
    {
        // Guard against NullReferenceException: the press may have happened
        // outside the PictureBox, in which case MouseDown never created the list.
        if (e.Button == MouseButtons.Left && lstPoints != null)
        {
            lstPoints.Add(new Point(e.X, e.Y));
        }
    }
    
    // Finishes the drag: appends the release point, serializes every recorded
    // point as "{X=..,Y=..}:" and sends the whole path as one MDRAWEVENT command.
    private void PictureBox1_MouseUp(object sender, MouseEventArgs e)
    {
        if (lstPoints == null)
            return; // the drag never started inside the PictureBox

        lstPoints.Add(new Point(e.X, e.Y));

        StringBuilder sb = new StringBuilder();

        foreach (Point obj in lstPoints)
        {
            // Point.ToString() yields "{X=..,Y=..}"; ':' separates points.
            sb.Append(Convert.ToString(obj) + ":");
        }

        // BUG FIX: Socket.Send takes byte[], not string — encode as UTF-8,
        // consistent with the other commands sent in this file.
        serverSocket.Send(Encoding.UTF8.GetBytes("MDRAWEVENT" + sb.ToString() + Environment.NewLine));
    }
    

    android service "SocketBackground.java":

    import java.net.Socket;
    
    String xline;
    
    // Create the reader once, outside the loop — the original re-wrapped the
    // input stream on every iteration, which can discard buffered data.
    BufferedReader xreader = new BufferedReader(new InputStreamReader(clientSocket.getInputStream(), StandardCharsets.UTF_8));
    
    while (clientSocket.isConnected()) {
    
        if (xreader.ready()) {
    
            while ((xline = xreader.readLine()) != null) {
                xline = xline.trim();
    
                if (!xline.isEmpty() && xline.contains("MDRAWEVENT")) {
    
                    // Payload format: "{X=1,Y=2}:{X=3,Y=4}:..." — the C#
                    // Point.ToString() output joined by ':'.
                    String coordinates = xline.replace("MDRAWEVENT", "");
                    String[] tokens = coordinates.split(Pattern.quote(":"));
                    Point[] moviments = new Point[tokens.length];
    
                    for (int i = 0; i < tokens.length; i++) {
                        // BUG FIX: renamed to "xy" — the original redeclared
                        // "coordinates" inside the same scope (compile error).
                        String[] xy = tokens[i].replace("{", "").replace("}", "").split(",");
    
                        int x = Integer.parseInt(xy[0].split("=")[1]);
                        int y = Integer.parseInt(xy[1].split("=")[1]);
    
                        moviments[i] = new Point(x, y);
                    }
    
                    MyAccessibilityService.instance.mouseDraw(moviments, 2000);
                }
            }
        }
    }
    

    android AccessibilityService "MyAccessibilityService.java":

    /**
     * Replays a recorded mouse drag as an accessibility gesture.
     *
     * NOTE(review): a new, progressively longer Path is dispatched on every
     * loop iteration — each dispatchGesture() call supersedes the previous
     * one, so the final (full) path is the gesture that ultimately runs to
     * completion. Presumably intentional; confirm before restructuring.
     *
     * @param segments recorded points in remote-screen coordinates; assumed
     *                 non-empty (segments[0] is the start of the path) — TODO confirm
     * @param time     stroke duration in milliseconds for each dispatched gesture
     */
    public void mouseDraw(Point[] segments, int time) {
        // dispatchGesture() requires API 24 (Android N) or newer.
        if (android.os.Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) {
    
            // The path starts at the first recorded point and grows by one
            // segment per iteration below.
            Path path = new Path();
            path.moveTo(segments[0].x, segments[0].y);
    
            for (int i = 1; i < segments.length; i++) {
    
                path.lineTo(segments[i].x, segments[i].y);
    
                // Stroke over the ENTIRE path so far, starting immediately (delay 0).
                GestureDescription.StrokeDescription sd = new GestureDescription.StrokeDescription(path, 0, time);
    
                dispatchGesture(new GestureDescription.Builder().addStroke(sd).build(), new AccessibilityService.GestureResultCallback() {
    
                    @Override
                    public void onCompleted(GestureDescription gestureDescription) {
                        super.onCompleted(gestureDescription);
                    }
    
                    @Override
                    public void onCancelled(GestureDescription gestureDescription) {
                        super.onCancelled(gestureDescription);
                    }
                }, null);
            }
        }
    }