Python-TypeError:“ NoneType”对象不可下标

时间:2019-04-01 16:41:59

标签: python

当我尝试在while循环中插入插入查询时出现此错误

TypeError:“ NoneType”对象不可下标

def get_data():
    try:
        conn = mysql.connector.connect(host='localhost',
                                       database='mydb',
                                       user='root',
                                       password='')
        cursor = conn.cursor(buffered=True)
        cursor.execute("SELECT * FROM my_urls WHERE crawl=0")

        row = cursor.fetchone()

        new_data = []

        while row is not None:
            page = requests.get(row[2])

            soup = BeautifulSoup(page.content, 'html.parser')

            #high quality link
            downloads = soup.findAll("li", class_="download-link")
            highq = downloads[-1]
            videofile = highq.find("a").attrs['href']

            #title
            title = soup.find("h1", class_="vone__title")
            title = title.text
            title = title.strip()

            #description
            description = soup.find("div", class_="vone__desc")
            description = description.text
            description = description.strip()

            #video thumbnail
            match = re.search("JSON.parse\(\'(.*)\'\)",page.text)
            thumbnail = ''
            if match:
                thumbnail = json.loads(match.group(1))['poster']

            #meta title
            meta_title = title + " | Test"

            #meta description
            meta_description = "Test."

            #meta keys
            meta_keys = "Test"

            #userid
            user_id = row[1]

            #slug
            slug = title
            slug = slug.replace(" - ", "-")
            slug = re.sub('/\s+/g', '-', slug)
            slug = slug.lower()



            active = 1
            row_data = (user_id,title,description,slug,meta_title,meta_description,meta_keys,active,thumbnail,row[2],'0',videofile)

            sql = "INSERT INTO media (user_id,title,description,slug,meta_title,meta_desc,meta_keys,active,video_thumbnail,ap_link,downloading,ext_filename) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"

            cursor.execute(sql, row_data)

            row = cursor.fetchone()



    except Error as e:
        print(e)

    finally:

        conn.commit()      

        cursor.close()
        conn.close()


if __name__ == '__main__':
    get_data()

我已经打印了要插入的数据,而元组中没有任何内容为空。

这是发生错误的那一行

row_data = (user_id,title,description,slug,meta_title,meta_description,meta_keys,active,thumbnail,row[2],'0',videofile)

            sql = "INSERT INTO media (user_id,title,description,slug,meta_title,meta_desc,meta_keys,active,video_thumbnail,ap_link,downloading,ext_filename) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"

            cursor.execute(sql, row_data)

完整错误:

 File "C:\Users\mich\Desktop\Work\test.py", line 75, in get_data
    row = cursor.fetchone()
  File "C:\laragon\bin\python\python-3.6.1\lib\site-packages\mysql\connector\cursor_cext.py", line 697, in fetchone
    return self._fetch_row()
  File "C:\laragon\bin\python\python-3.6.1\lib\site-packages\mysql\connector\cursor_cext.py", line 669, in _fetch_row
    row = self._rows[self._next_row]
TypeError: 'NoneType' object is not subscriptable

我该如何解决?

1 个答案:

答案 0 :(得分:1)

您不能在cursor.execute()所使用的光标上使用cursor.fetchone()。参见MySQLCursor.fetchone()

  

在使用同一连接执行新语句之前,必须获取当前查询的所有行。

尝试insert  使用第二个光标:

def get_data():
    try:
        conn = mysql.connector.connect(host='localhost',
                                       database='mydb',
                                       user='root',
                                       password='')
        cursor = conn.cursor(buffered=True)
        cursor2 = conn.cursor()
        cursor.execute("SELECT * FROM my_urls WHERE crawl=0")

        row = cursor.fetchone()

        new_data = []

        while row is not None:
            page = requests.get(row[2])

            soup = BeautifulSoup(page.content, 'html.parser')

            #high quality link
            downloads = soup.findAll("li", class_="download-link")
            highq = downloads[-1]
            videofile = highq.find("a").attrs['href']

            #title
            title = soup.find("h1", class_="vone__title")
            title = title.text
            title = title.strip()

            #description
            description = soup.find("div", class_="vone__desc")
            description = description.text
            description = description.strip()

            #video thumbnail
            match = re.search("JSON.parse\(\'(.*)\'\)",page.text)
            thumbnail = ''
            if match:
                thumbnail = json.loads(match.group(1))['poster']

            #meta title
            meta_title = title + " | Test"

            #meta description
            meta_description = "Test."

            #meta keys
            meta_keys = "Test"

            #userid
            user_id = row[1]

            #slug
            slug = title
            slug = slug.replace(" - ", "-")
            slug = re.sub('/\s+/g', '-', slug)
            slug = slug.lower()



            active = 1
            row_data = (user_id,title,description,slug,meta_title,meta_description,meta_keys,active,thumbnail,row[2],'0',videofile)

            sql = "INSERT INTO media (user_id,title,description,slug,meta_title,meta_desc,meta_keys,active,video_thumbnail,ap_link,downloading,ext_filename) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"

            cursor2.execute(sql, row_data)

            row = cursor.fetchone()



    except Error as e:
        print(e)

    finally:

        conn.commit()      

        cursor.close()
        conn.close()


if __name__ == '__main__':
    get_data()