
Python Thread error in Twitch Viewer bot script


I am currently using:

Twitch Viewer Bot

I have been tweaking the code to get it working. This is what I currently have:

import requests
import subprocess
import json
import sys
import threading
import time
from multiprocessing import Queue
import urllib3
import urllib3.contrib.pyopenssl

urllib3.contrib.pyopenssl.inject_into_urllib3()
#urllib3.disable_warnings() unverified HTTPS requests
numberOfViewers = int(sys.argv[1])
builderThreads = int(sys.argv[2])
startTime = time.time()
numberOfSockets = 0
concurrent = 25
urls = []
urlsUsed = []
proxies = {
    'http': 'http://173.201.183.172:8000',
    'http': 'http://94.181.34.64:81',
}


# def getURL(): # Get tokens
#   output = subprocess.Popen(["livestreamer", "twitch.tv/swagvyper", "-j"], stdout=subprocess.PIPE).communicate()[0]
#   return json.loads(output)['streams']['worst']['url'] # Parse json and return the URL parameter

def getURL():
    output = urlopen('http://www.twitch.tv/CHANNEL') # Any URL
    return json.load(output)['streams']['worst']['url']

def build(): # Builds a set of tokens, aka viewers
    global numberOfSockets
    global numberOfViewers
    while True:
        if numberOfSockets < numberOfViewers:
            numberOfSockets += 1
            print("Building viewers " + str(numberOfSockets) + "/" + str(numberOfViewers))
            urls.append(getURL())

def view(): # Opens connections to send views
    global numberOfSockets
    while True:
        url=q.get()
        # requests.head(url, proxies=proxies)
        requests.head(url) # Sending a HEAD request is enough to count as a view
        if (url in urlsUsed):
            urls.remove(url)
            urlsUsed.remove(url)
            numberOfSockets -= 1
        else:
            urlsUsed.append(url)
        q.task_done()

if __name__ == '__main__':
    for i in range(0, builderThreads):
        threading.Thread(target = build).start()

    while True:
        while (numberOfViewers != numberOfSockets): # Wait until sockets are built
            time.sleep(1)

        q=Queue(concurrent*2)
        for i in range(concurrent):
            try:
                t=threading.Thread(target=view)
                t.daemon=True
                t.start()
            except:
                print('thread error')
        try:
            for url in urls:
                print(url)
                q.put(url.strip())
                q.join()
        except KeyboardInterrupt:
            sys.exit(1)

However, when running it I appear to be getting this error, and I'm not 100% sure where to go from here:

thread errorException in thread Thread-1:
Traceback (most recent call last):
File "C:\Program Files (x86)\Python36-32\lib\threading.py", line 916, in _bootstrap_inner
    self.run()
File "C:\Program Files (x86)\Python36-32\lib\threading.py", line 864, in run
    self._target(*self._args, **self._kwargs)
File "bot.py", line 41, in build
    urls.append(getURL())
File "bot.py", line 31, in getURL
    output = urlopen('http://www.twitch.tv/dsboywonder25') # Any URL
NameError: name 'urlopen' is not defined

Traceback (most recent call last):
File "bot.py", line 70, in <module>
File "C:\Program Files (x86)\Python36-32\lib\threading.py", line 846, in start
MemoryError

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
File "bot.py", line 72, in <module>
MemoryError

I wondered if anyone has seen this before or could point me in the right direction. Thank you in advance :)


Solution

  • File "bot.py", line 31, in getURL output = urlopen('http://www.twitch.tv/dsboywonder25') # Any URL NameError: name 'urlopen' is not defined

    urlopen is not defined: you haven't defined or imported urlopen anywhere, so the script doesn't know what you're referring to.

    urlopen is a function from urllib (urllib.request in Python 3, urllib2 in Python 2). You've imported requests and urllib3, which are two other modules for making HTTP requests. You'll need to decide which module is most useful for the given application, import it, and use the appropriate method for opening the connections/making the requests you need.
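
    As an illustration, here are two hedged sketches of getURL(), assuming Python 3 (your traceback shows Python36-32). The CHANNEL placeholder and the ['streams']['worst']['url'] parsing are copied from your script; they will only work if that URL actually returns the stream JSON, which is what the commented-out livestreamer call produced.

    # Option 1: keep urlopen, but actually import it
    # (in Python 3 it lives in urllib.request).
    from urllib.request import urlopen
    import json

    def getURL():
        output = urlopen('http://www.twitch.tv/CHANNEL')  # placeholder channel URL
        return json.load(output)['streams']['worst']['url']

    # Option 2: drop urlopen and reuse requests, which the script already imports.
    import requests

    def getURL():
        response = requests.get('http://www.twitch.tv/CHANNEL')  # placeholder channel URL
        return response.json()['streams']['worst']['url']

    Either way, the NameError goes away once urlopen (or whatever replaces it) is actually defined in the module.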