1

Trying to append data to the dataframe, but an error is raised

index contains duplicate entries

I want the data in a CSV file, but I get an error on this line:

df = df.pivot(index=['v1','v2','v3'], columns='image', values='link').reset_index().fillna('')

Code:

import requests
from bs4 import BeautifulSoup
import pandas as pd
# Base site URL; the product hrefs scraped below are site-relative.
baseurl='https://twillmkt.com'
headers ={
    'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36'
}
# Fetch the collection listing page (note: no headers on this first request).
r =requests.get('https://twillmkt.com/collections/denim')
soup=BeautifulSoup(r.content, 'html.parser')
tra = soup.find_all('div',class_='ProductItem__Wrapper')
productlinks=[]
# Keep only the first <a href> inside each product tile -> one URL per product.
for links in tra:
    for link in links.find_all('a',href=True)[:1]:
        comp=baseurl+link['href']
        productlinks.append(comp)
data=[]    
# u, k, w, n are never used again (dead variables; `k` is even shadowed by
# the comprehension variable further down).
u=[]
k=[]
w=[]
n=[]   
for link in productlinks:
    r =requests.get(link,headers=headers)
    soup=BeautifulSoup(r.content, 'html.parser')
    # find() returns a single Tag; iterating a Tag walks its direct children,
    # so `e` numbers the slideshow entries of this product.
    up = soup.find('div',class_='Product__SlideshowNavScroller')
    for e,pro in enumerate(up):
        t=pro.find('img').get('src')
        # This image-only record has no v1/v2/v3 keys, so after
        # pd.DataFrame(data) those columns are NaN for it. Every such row
        # then shares the same (NaN, NaN, NaN) index triple, which is one
        # source of the "Index contains duplicate entries" error in the
        # pivot() call below.
        data.append({'image':'Image '+str(e)+' UI','link':t})
        dup = soup.find_all('div',class_='OptionSelector list-options')
        for ro in dup:
            # Variant labels come from the option-selector buttons; assumes
            # at least three buttons per selector — TODO confirm this holds
            # for every product, otherwise m[2] raises IndexError.
            m=[k.text.strip() for k in ro.find_all('button')]
            variant1=m[0]
            variant2=m[1]
            variant3=m[2]
            data.append({'image':'Image '+str(e)+' UI','link':t,'v1':variant1,'v2':variant2,'v3':variant3})

df = pd.DataFrame(data)
# Order image columns by first appearance instead of alphabetically.
df.image=pd.Categorical(df.image,categories=df.image.unique(),ordered=True)
# Raises ValueError "Index contains duplicate entries, cannot reshape":
# pivot() requires each (v1, v2, v3) combination to be unique, but the same
# variant triple occurs for more than one row (different products can share
# identical variant values, plus the NaN rows noted above).
df = df.pivot(index=['v1','v2','v3'], columns='image', values='link').reset_index().fillna('')
df.to_csv('yt.csv')
```
HedgeHog
  • 22,146
  • 4
  • 14
  • 36
Amen Aziz
  • 769
  • 2
  • 13
  • Does [this](https://stackoverflow.com/questions/28651079/pandas-unstack-problems-valueerror-index-contains-duplicate-entries-cannot-re) help? – Kamalesh S Dec 23 '21 at 14:25

1 Answer

0

What happens?

One reason for the duplicates is that you are appending records to `data` that you may not need:

data.append({'image':'Image '+str(e)+' UI','link':t})

because you also append:

data.append({'image':'Image '+str(e)+'UI','link':t,'v1':variant1,'v2':variant2,'v3':variant3}) 

But the main issue is that you are using index=['v1','v2','v3'] to build the index, and this combination of values is not unique across rows.

How to fix?

  1. Skip this line data.append({'image':'Image '+str(e)+' UI','link':t})

  2. Add also the id of the product data.append({'id':t.split('=')[-1],'image':'Image '+str(e)+' UI','link':t,'v1':variant1,'v2':variant2,'v3':variant3})

  3. Use id to build a unique index index=['id','v1','v2','v3']

Example

import requests
from bs4 import BeautifulSoup
import pandas as pd

# Scrape every product of the "denim" collection and build one row per
# (product id, v1, v2, v3) variant, with one column per product image.
baseurl='https://twillmkt.com'
headers ={
    'User-Agent':'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.114 Safari/537.36'
}

# Collect one product-detail URL per listing tile: only the first <a href>
# inside each ProductItem__Wrapper is kept.
listing = BeautifulSoup(
    requests.get('https://twillmkt.com/collections/denim').content,
    'html.parser',
)
productlinks = [
    baseurl + anchor['href']
    for wrapper in listing.find_all('div', class_='ProductItem__Wrapper')
    for anchor in wrapper.find_all('a', href=True)[:1]
]

data = []
for url in productlinks:
    page = BeautifulSoup(requests.get(url, headers=headers).content, 'html.parser')
    # find() yields a single Tag; iterating it walks its direct children,
    # so `idx` numbers this product's slideshow entries.
    slideshow = page.find('div', class_='Product__SlideshowNavScroller')
    for idx, slide in enumerate(slideshow):
        src = slide.find('img').get('src')
        for selector in page.find_all('div', class_='OptionSelector list-options'):
            # Variant labels come from the option-selector buttons.
            labels = [btn.text.strip() for btn in selector.find_all('button')]
            # The trailing '=' segment of the image src serves as a product
            # id, making the pivot index below unique.
            data.append({
                'id': src.split('=')[-1],
                'image': f'Image {idx} UI',
                'link': src,
                'v1': labels[0],
                'v2': labels[1],
                'v3': labels[2],
            })

df = pd.DataFrame(data)
# Order image columns by first appearance instead of alphabetically.
df.image = pd.Categorical(df.image, categories=df.image.unique(), ordered=True)
# Including 'id' makes each index combination unique, so pivot() no longer
# raises "Index contains duplicate entries".
df = df.pivot(index=['id', 'v1', 'v2', 'v3'], columns='image', values='link').reset_index().fillna('')
df

`

HedgeHog
  • 22,146
  • 4
  • 14
  • 36