From 44fa415441aa476e56e3e3f75c580f8b96909eb1 Mon Sep 17 00:00:00 2001
From: Jose Blanco
Date: Tue, 28 Jan 2025 11:40:45 -0500
Subject: [PATCH] cleanup code

---
 src/app/app-routing.module.ts          |   5 -
 src/app/clockss-redirect.component.ts  |  13 --
 .../clockss-redirect.component.ts      |  13 --
 .../robots-redirect.component.ts       |  13 --
 src/assets/clockss.txt                 |   1 -
 src/assets/robots.txt                  | 151 ------------------
 6 files changed, 196 deletions(-)
 delete mode 100644 src/app/clockss-redirect.component.ts
 delete mode 100644 src/app/clockss-redirect/clockss-redirect.component.ts
 delete mode 100644 src/app/robots-redirect/robots-redirect.component.ts
 delete mode 100644 src/assets/clockss.txt
 delete mode 100644 src/assets/robots.txt

diff --git a/src/app/app-routing.module.ts b/src/app/app-routing.module.ts
index 8b61a61ea3d..b33a8ff14fb 100644
--- a/src/app/app-routing.module.ts
+++ b/src/app/app-routing.module.ts
@@ -2,9 +2,6 @@ import { NgModule } from '@angular/core';
 import { RouterModule, NoPreloading } from '@angular/router';
 import { AuthBlockingGuard } from './core/auth/auth-blocking.guard';
 import { ExternalRedirectComponent } from './external-redirect/external-redirect.component';
-import { RobotsComponent } from './robots-redirect/robots-redirect.component';
-import { ClockssComponent } from './clockss-redirect/clockss-redirect.component';
-
 import { AuthenticatedGuard } from './core/auth/authenticated.guard';
 
 import {
@@ -59,8 +56,6 @@ import { ThemedPageErrorComponent } from './page-error/themed-page-error.compone
     resolve: [MenuResolver],
     children: [
       { path: '', component: ExternalRedirectComponent, pathMatch: 'full' },
-      { path: 'robots123.txt', component: RobotsComponent },
-      { path: 'clockss123.txt', component: ClockssComponent },
       {
         path: 'reload/:rnd',
         component: ThemedPageNotFoundComponent,
diff --git a/src/app/clockss-redirect.component.ts b/src/app/clockss-redirect.component.ts
deleted file mode 100644
index 614aeae4eb0..00000000000
--- a/src/app/clockss-redirect.component.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-import { Component, OnInit } from '@angular/core';
-
-@Component({
-  selector: 'clockss-redirect',
-  template: '',
-})
-export class ClockssComponent implements OnInit {
-  constructor() {}
-
-  ngOnInit(): void {
-    window.location.href = 'https://www.lib.umich.edu/collections/deep-blue-repositories';
-  }
-}
\ No newline at end of file
diff --git a/src/app/clockss-redirect/clockss-redirect.component.ts b/src/app/clockss-redirect/clockss-redirect.component.ts
deleted file mode 100644
index 4be70b6ec93..00000000000
--- a/src/app/clockss-redirect/clockss-redirect.component.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-import { Component, OnInit } from '@angular/core';
-
-@Component({
-  selector: 'app-clockss',
-  template: '',
-})
-export class ClockssComponent implements OnInit {
-  constructor() {}
-
-  ngOnInit(): void {
-    window.location.href = 'assets/clockss.txt';
-  }
-}
\ No newline at end of file
diff --git a/src/app/robots-redirect/robots-redirect.component.ts b/src/app/robots-redirect/robots-redirect.component.ts
deleted file mode 100644
index 205f2a49a0b..00000000000
--- a/src/app/robots-redirect/robots-redirect.component.ts
+++ /dev/null
@@ -1,13 +0,0 @@
-import { Component, OnInit } from '@angular/core';
-
-@Component({
-  selector: 'robots-redirect',
-  template: '',
-})
-export class RobotsComponent implements OnInit {
-  constructor() {}
-
-  ngOnInit(): void {
-    window.location.href = 'assets/robots.txt';
-  }
-}
\ No newline at end of file
diff --git a/src/assets/clockss.txt b/src/assets/clockss.txt
deleted file mode 100644
index fb494d1227e..00000000000
--- a/src/assets/clockss.txt
+++ /dev/null
@@ -1 +0,0 @@
-CLOCKSS system has permission to ingest, preserve, and serve this Archival Unit.
diff --git a/src/assets/robots.txt b/src/assets/robots.txt
deleted file mode 100644
index cd5e228b3a0..00000000000
--- a/src/assets/robots.txt
+++ /dev/null
@@ -1,151 +0,0 @@
-# The URL to the DSpace sitemaps
-# XML sitemap is listed first as it is preferred by most search engines
-Sitemap: <%= origin %>/sitemap_index.xml
-Sitemap: <%= origin %>/sitemap_index.html
-
-##########################
-# Default Access Group
-# (NOTE: blank lines are not allowable in a group record)
-##########################
-User-agent: *
-# Disable access to Discovery search and filters; admin pages; processes; submission; workspace; workflow & profile page
-Disallow: /search
-Disallow: /admin/*
-Disallow: /processes
-Disallow: /submit
-Disallow: /workspaceitems
-Disallow: /profile
-Disallow: /workflowitems
-
-# Optionally uncomment the following line ONLY if sitemaps are working
-# and you have verified that your site is being indexed correctly.
-# Disallow: /browse/*
-#
-# If you have configured DSpace (Solr-based) Statistics to be publicly
-# accessible, then you may not want this content to be indexed
-# Disallow: /statistics
-#
-# You also may wish to disallow access to the following paths, in order
-# to stop web spiders from accessing user-based content
-# Disallow: /contact
-# Disallow: /feedback
-# Disallow: /forgot
-# Disallow: /login
-# Disallow: /register
-
-
-##############################
-# Section for misbehaving bots
-# The following directives to block specific robots were borrowed from Wikipedia's robots.txt
-##############################
-
-# advertising-related bots:
-User-agent: Mediapartners-Google*
-Disallow: /
-
-# Crawlers that are kind enough to obey, but which we'd rather not have
-# unless they're feeding search engines.
-User-agent: UbiCrawler
-Disallow: /
-
-User-agent: DOC
-Disallow: /
-
-User-agent: Zao
-Disallow: /
-
-# Some bots are known to be trouble, particularly those designed to copy
-# entire sites. Please obey robots.txt.
-User-agent: sitecheck.internetseer.com
-Disallow: /
-
-User-agent: Zealbot
-Disallow: /
-
-User-agent: MSIECrawler
-Disallow: /
-
-User-agent: SiteSnagger
-Disallow: /
-
-User-agent: WebStripper
-Disallow: /
-
-User-agent: WebCopier
-Disallow: /
-
-User-agent: Fetch
-Disallow: /
-
-User-agent: Offline Explorer
-Disallow: /
-
-User-agent: Teleport
-Disallow: /
-
-User-agent: TeleportPro
-Disallow: /
-
-User-agent: WebZIP
-Disallow: /
-
-User-agent: linko
-Disallow: /
-
-User-agent: HTTrack
-Disallow: /
-
-User-agent: Microsoft.URL.Control
-Disallow: /
-
-User-agent: Xenu
-Disallow: /
-
-User-agent: larbin
-Disallow: /
-
-User-agent: libwww
-Disallow: /
-
-User-agent: ZyBORG
-Disallow: /
-
-User-agent: Download Ninja
-Disallow: /
-
-# Misbehaving: requests much too fast:
-User-agent: fast
-Disallow: /
-
-#
-# If your DSpace is going down because of someone using recursive wget,
-# you can activate the following rule.
-#
-# If your own faculty is bringing down your dspace with recursive wget,
-# you can advise them to use the --wait option to set the delay between hits.
-#
-#User-agent: wget
-#Disallow: /
-
-#
-# The 'grub' distributed client has been *very* poorly behaved.
-#
-User-agent: grub-client
-Disallow: /
-
-#
-# Doesn't follow robots.txt anyway, but...
-#
-User-agent: k2spider
-Disallow: /
-
-#
-# Hits many times per second, not acceptable
-# http://www.nameprotect.com/botinfo.html
-User-agent: NPBot
-Disallow: /
-
-# A capture bot, downloads gazillions of pages with no public benefit
-# http://www.webreaper.net/
-User-agent: WebReaper
-Disallow: /