Add an ignored link and strip trailing slashes from URL links before checking them

This commit is contained in:
Marek Dano 2021-03-21 07:58:56 +00:00
parent 97cb773f45
commit 9722487f46

View file

@@ -9,6 +9,7 @@ ignored_links = [
     'https://github.com/public-apis/public-apis/actions?query=workflow%3A%22Run+tests%22',
     'https://github.com/public-apis/public-apis/workflows/Validate%20links/badge.svg?branch=master',
     'https://github.com/public-apis/public-apis/actions?query=workflow%3A%22Validate+links%22',
+    'https://github.com/davemachado/public-api',
 ]
 
 def parse_links(filename):
@@ -28,20 +29,21 @@ def dup_links(links):
     seen = {}
     dupes = []
-    for x in links:
-        if x in ignored_links:
+    for link in links:
+        link = link.rstrip('/')
+        if link in ignored_links:
             continue
-        if x not in seen:
-            seen[x] = 1
+        if link not in seen:
+            seen[link] = 1
         else:
-            if seen[x] == 1:
-                dupes.append(x)
+            if seen[link] == 1:
+                dupes.append(link)
     if not dupes:
-        print(f"No duplicated links")
+        print(f"No duplicate links")
     else:
-        print(f"Found duplicated links: {dupes}")
+        print(f"Found duplicate links: {dupes}")
         hasError = True
     return hasError