Merch public beta

This commit is contained in:
Mentor 2022-01-27 10:18:37 +01:00
parent 25243a06bd
commit 44f486df95
2 changed files with 109 additions and 0 deletions

108
community/scraping.js Normal file
View File

@ -0,0 +1,108 @@
/* ///////////////////////////////
// Twitter
// scraping for signer.is
// /////////////////////////////*/
/**
 * Extract the claimed signatory address from text containing a signer.is verify link.
 * The link embeds a base64-encoded, URI-encoded JSON payload after /#/verify/.
 * @param {string} text - raw HTML/text that may contain a signer.is verify URL
 * @returns {(string|false)} the payload's claimed_signatory, or false when no valid payload is found
 */
function get_address_from_base64( text ) {
	// Escape the dot in "signer.is" so an arbitrary character cannot match there.
	// The payload runs until a closing tag, a closing quote, or end of input.
	const [ , base64 ] = text.match( /(?:https:\/\/signer\.is\/#\/verify\/)(.*?)(?:(<\/)|(">)|($))/ ) || []
	try {
		// Renamed from `text` to avoid shadowing the parameter
		const decoded = atob( base64 )
		const json = JSON.parse( decodeURIComponent( decoded ) )
		return json.claimed_signatory
	} catch( e ) {
		// No match (base64 undefined) or malformed payload both land here
		console.log( `Decoding error for ${ base64 } `, e )
		return false
	}
}
/**
 * Resolve a list of (possibly shortened) links and decode the signer.is
 * address embedded in each response body.
 * @param {Array<string>} links - URLs to fetch
 * @returns {Promise<Array>} decoded addresses, with false entries for failures
 */
async function get_addresses_from_twitter_links( links ) {
	// Fetch all link targets in parallel and keep only the response bodies
	const bodies = await Promise.all( links.map( async url => {
		const response = await fetch( url )
		return response.text()
	} ) )
	return bodies.map( body => get_address_from_base64( body ) )
}
/**
 * Scrape the current Twitter replies page for signer.is verify links and
 * print the decoded addresses, one per line.
 * NOTE: meant to run in the browser console; cross-origin fetches require
 * CSP headers to be disabled first (e.g. via a browser extension).
 */
async function scrape_signer_links_in_replies( ) {
	console.log( '⚠️ Disable security policy headers with a chrome extension' )
	const anchors = [ ...document.querySelectorAll( 'a' ) ]
	const verify_links = anchors
		.filter( anchor => anchor.innerText.includes( 'signer.is/#/verify' ) )
		.map( anchor => anchor.href )
	const addresses = await get_addresses_from_twitter_links( verify_links )
	console.log( addresses.join( '\n' ) )
}
/**
 * Continuously scan the Twitter DM inbox for conversations containing
 * signer.is verify links: open each unvisited conversation, grab the link,
 * navigate back, repeat. Runs forever by design — watch the console output
 * and stop it manually, then pass `hits` to get_addresses_from_twitter_links
 * once the count stagnates.
 */
async function scrape_signer_links_in_dm( ) {
	console.log( `This function runs for an indeterminate length, keep an eye on it and run get_addresses_from_twitter_links when results stagnate` )
	const wait = ( durationinMs=1000 ) => new Promise( resolve => setTimeout( resolve, durationinMs ) )
	// Pull the @handle out of a DM list element, false when none is present
	function get_handle_from_element( element ) {
		// .match returns null on no match — guard so destructuring cannot throw
		const [ match, handle ] = element.innerHTML.match( /(@.+?)(?:<\/)/ ) || []
		if( handle ) return handle
		else return false
	}
	const hits = []
	const done = []
	while( true ) {
		const messages = document.querySelectorAll( '[aria-selected=false]' )
		for (let i = messages.length - 1; i >= 0; i--) {
			// Get the handle of the message we are trying
			const handle = get_handle_from_element( messages[i] )
			if( done.includes( handle ) ) continue
			// Skip elements detached from the DOM since the query
			if( !messages[i].isConnected ) continue
			// open the message panel and grab the link
			messages[i].click()
			await wait()
			const links = document.querySelectorAll( 'a' )
			const { href, ...rest } = [ ...links ].find( ( { innerText } ) => innerText.includes( 'signer.is/#/verify' ) ) || []
			// Save the link and mark the handle as done if need be
			if( href ) hits.push( href )
			done.push( handle )
			document.querySelector( '[aria-label="Back"]' ).click()
			await wait()
		}
		console.log( `Checked ${ done.length } handles. Found: `, hits )
	}
}
/**
 * Scrape the current Discord channel for signer.is verify links and print
 * the decoded addresses, one per line. The URL is read from the anchor's
 * `title` attribute rather than `href` — presumably because Discord rewrites
 * hrefs while keeping the original URL in the title (TODO confirm).
 */
function discord_channel_scraping() {
	const hrefs = document.querySelectorAll( 'a' )
	const has_signer_is = [ ...hrefs ].filter( ( { innerText, ...rest } ) => {
		return innerText.includes( 'signer.is/#/verify' )
	} )
	// Optional chain so a page with zero matches does not crash this debug log
	console.log( has_signer_is[0]?.href )
	const signer_is_hrefs = has_signer_is.map( ( { title } ) => title )
	const addresses = signer_is_hrefs.map( get_address_from_base64 )
	console.log( addresses.join( '\n' ) )
}

View File

@ -0,0 +1 @@
<?xml version="1.0" encoding="UTF-8"?><!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"><svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="24" height="24" viewBox="0 0 24 24"><path d="M9.06,1.93C7.17,1.92 5.33,3.74 6.17,6H3A2,2 0 0,0 1,8V10A1,1 0 0,0 2,11H11V8H13V11H22A1,1 0 0,0 23,10V8A2,2 0 0,0 21,6H17.83C19,2.73 14.6,0.42 12.57,3.24L12,4L11.43,3.22C10.8,2.33 9.93,1.94 9.06,1.93M9,4C9.89,4 10.34,5.08 9.71,5.71C9.08,6.34 8,5.89 8,5A1,1 0 0,1 9,4M15,4C15.89,4 16.34,5.08 15.71,5.71C15.08,6.34 14,5.89 14,5A1,1 0 0,1 15,4M2,12V20A2,2 0 0,0 4,22H20A2,2 0 0,0 22,20V12H13V20H11V12H2Z" /></svg>

After

Width:  |  Height:  |  Size: 695 B