diff --git a/.github/images/addSignals.png b/.github/images/addSignals.png new file mode 100644 index 0000000..b6e5f8a Binary files /dev/null and b/.github/images/addSignals.png differ diff --git a/README.md b/README.md index 8a2dcf3..2304cae 100644 --- a/README.md +++ b/README.md @@ -66,7 +66,6 @@ With our now deobfuscated version of the script, we can begin to determine how t The main feature of the slide captcha is having the puzzle piece in the correct position. This is the first benchmark in determining whether the user is a human or a bot. To do this, we can use python's cv2 library to perform template matching. I found this approach to be 88-90% accurate, which isn't ideal, but considering it takes on average 0.02 seconds per detection, I believe where we sacrifice in accuracy we gain in speed and efficiency. - This process can be found [here](https://github.com/joekav/SlideCaptcha/tree/main/detection). ## Generating our payload @@ -78,8 +77,10 @@ This process can be found [here](https://github.com/joekav/SlideCaptcha/tree/mai ### Signals First, we can begin to add our own hardcoded values which we have collected from our own browser. Values such as screen sizes, user agents, device and device memory can all just be thrown straight into our payloads, as they are very generic and hard to fingerprint, as millions of devices will have similar values. +![signals](https://github.com/joekav/SlideCaptcha/blob/main/.github/images/addSignals.png?raw=true) + #### Events -Events are a bit different from the other values we will use, as we cannot hard code these since Datadome deem these an important piece of our fingerprint. I went with mouse events, as they seemed the easiest to replicate. Datadome collect the x and y pixels of the cursor, along with the timestamps at which each movement happens. 
I wrote a basic function to emulate these events, which focus on the cursor moving from a starting x value, to an x value determined by the location of the puzzle piece, as the mouse events stop recording upon a `mouseup` event. These coordinates and timestamps are then used to calculate many different values included in the signals. A standard deviation of both the x and y values and an average speed of x and y are two of them. +Events are a bit different from the other values we will use, as we cannot hard code these since Datadome deem these an important piece of our fingerprint. I went with mouse events, as they seemed the easiest to replicate. Datadome collect the x and y pixels of the cursor, along with the timestamps at which each movement happens. I wrote a [basic function](https://github.com/joekav/SlideCaptcha/blob/main/api/src/mouse.js) to emulate these events, which focuses on the cursor moving from a starting x value, to an x value determined by the location of the puzzle piece, as the mouse events stop recording upon a `mouseup` event. These coordinates and timestamps are then used to calculate many different values included in the signals. A standard deviation of both the x and y values and an average speed of x and y are two of them. 
#### Canvas fingerprints diff --git a/api/src/gen.js b/api/src/gen.js index 624da50..ec49a4f 100644 --- a/api/src/gen.js +++ b/api/src/gen.js @@ -49,14 +49,14 @@ async function generatePayload(body) { var signals = new signalsClass(); signals.addSignal("v", "1.8.2"); signals.addSignal("h", "97946689b033b3727194dd2bffeb337ba15cd6fba2ab5775e8b5e6d255da6074"); - signals.addSignal("tstf", 16); // navigator.hardwareConcurrency * 2 - signals.addSignal("tagpu", randomFloatString(10, 16)); // hn is earlier performance.now() // performance.now() - hn + signals.addSignal("tstf", 16); + signals.addSignal("tagpu", randomFloatString(10, 16)); signals.addSignal("ccsT", "Error\nat a (:317:28)\nat :939:34\nat :1165:34\nat p.exports (:958:19)\nat g.sendPayload (:1499:25)\n"); signals.addSignal("ccsB", "t/static/chunks/4114.d5b644d4ccc9b88c.js:11:7944)\nat n.args. (https://cdn.oaistatic.com/_next/static/chunks/4114.d5b644d4ccc9b88c.js:59:153)"); - signals.addSignal("ccsH", 2520352591); // defined, never changed - signals.addSignal("ccsV", "a709286857318dc6587a0bd877c6010672ad8b5a8eae3d6b29e3c055f629cdf3"); // no idea - signals.addSignal("cssS", "1.36,2.78,8.63,14.96,3.02,4.56,10.73,4.15,2.79"); // no idea - signals.addSignal("css0", "32, 108, 5"); // no idea + signals.addSignal("ccsH", 2520352591); + signals.addSignal("ccsV", "a709286857318dc6587a0bd877c6010672ad8b5a8eae3d6b29e3c055f629cdf3"); + signals.addSignal("cssS", "1.36,2.78,8.63,14.96,3.02,4.56,10.73,4.15,2.79"); + signals.addSignal("css0", "32, 108, 5"); signals.addSignal("css1", "14.9216, 1.02084, -0.3228, 0.0300839, -0.149609, 0.793364, -4.4068, 0.410699, -0.173615, 2.90683, 1.16728, -0.108786, -1.86288, 31.1903, 12.5249, -0.167277"); // no idea signals.addSignal("cssH", "0px"); signals.addSignal("plgod", false);