package WWW::RobotRules;

$VERSION = "6.02";
sub Version { $VERSION; }

use strict;
use URI ();



sub new {
    my($class, $ua) = @_;

    # This ugly hack is needed to ensure backwards compatibility.
    # The "WWW::RobotRules" class is now really abstract.
    $class = "WWW::RobotRules::InCore" if $class eq "WWW::RobotRules";

    my $self = bless { }, $class;
    $self->agent($ua);
    $self;
}


sub parse {
    my($self, $robot_txt_uri, $txt, $fresh_until) = @_;
    $robot_txt_uri = URI->new("$robot_txt_uri");
    my $netloc = $robot_txt_uri->host . ":" . $robot_txt_uri->port;

    $self->clear_rules($netloc);
    $self->fresh_until($netloc, $fresh_until || (time + 365*24*3600));

    my $ua;
    my $is_me = 0;		# 1 iff this record is for me
    my $is_anon = 0;		# 1 iff this record is for *
    my $seen_disallow = 0;      # watch for missing record separators
    my @me_disallowed = ();	# rules disallowed for me
    my @anon_disallowed = ();	# rules disallowed for *

    # blank lines are significant, so turn CRLF into LF to avoid generating
    # false ones
    $txt =~ s/\015\012/\012/g;

    # split at \012 (LF) or \015 (CR) (Mac text files have just CR for EOL)
    for(split(/[\012\015]/, $txt)) {

	# Lines containing only a comment are discarded completely, and
        # therefore do not indicate a record boundary.
	next if /^\s*\#/;

	s/\s*\#.*//;        # remove comments at end-of-line

	if (/^\s*$/) {	    # blank line
	    last if $is_me; # That was our record. No need to read the rest.
	    $is_anon = 0;
	    $seen_disallow = 0;
	}
        elsif (/^\s*User-Agent\s*:\s*(.*)/i) {
	    $ua = $1;
	    $ua =~ s/\s+$//;

	    if ($seen_disallow) {
		# treat as start of a new record
		$seen_disallow = 0;
		last if $is_me; # That was our record. No need to read the rest.
		$is_anon = 0;
	    }

	    if ($is_me) {
		# This record already had a User-agent that
		# we matched, so just continue.
	    }
	    elsif ($ua eq '*') {
		$is_anon = 1;
	    }
	    elsif($self->is_me($ua)) {
		$is_me = 1;
	    }
	}
	elsif (/^\s*Disallow\s*:\s*(.*)/i) {
	    unless (defined $ua) {
		warn "RobotRules <$robot_txt_uri>: Disallow without preceding User-agent\n" if $^W;
		$is_anon = 1;  # assume that User-agent: * was intended
	    }
	    my $disallow = $1;
	    $disallow =~ s/\s+$//;
	    $seen_disallow = 1;
	    if (length $disallow) {
		my $ignore;
		eval {
		    my $u = URI->new_abs($disallow, $robot_txt_uri);
		    $ignore++ if $u->scheme ne $robot_txt_uri->scheme;
		    $ignore++ if lc($u->host) ne lc($robot_txt_uri->host);
		    $ignore++ if $u->port ne $robot_txt_uri->port;
		    $disallow = $u->path_query;
		    $disallow = "/" unless length $disallow;
		};
		next if $@;
		next if $ignore;
	    }

	    if ($is_me) {
		push(@me_disallowed, $disallow);
	    }
	    elsif ($is_anon) {
		push(@anon_disallowed, $disallow);
	    }
	}
        elsif (/\S\s*:/) {
             # ignore
        }
	else {
	    warn "RobotRules <$robot_txt_uri>: Malformed record: <$_>\n" if $^W;
	}
    }

    if ($is_me) {
	$self->push_rules($netloc, @me_disallowed);
    }
    else {
	$self->push_rules($netloc, @anon_disallowed);
    }
}


#
# Returns TRUE if the given name matches the
# name of this robot
#
sub is_me {
    my($self, $ua_line) = @_;
    my $me = $self->agent;

    # See whether my short-name is a substring of the
    #  "User-Agent: ..." line that we were passed:

    if (index(lc($ua_line), lc($me)) >= 0) {
      return 1;
    }
    else {
      return '';
    }
}


sub allowed {
    my($self, $uri) = @_;
    $uri = URI->new("$uri");

    return 1 unless $uri->scheme eq 'http' or $uri->scheme eq 'https';
     # Robots.txt applies to only those schemes.

    my $netloc = $uri->host . ":" . $uri->port;

    my $fresh_until = $self->fresh_until($netloc);
    # If no rules are cached for this host, or the cached rules have
    # expired, return -1 so the caller knows to refetch robots.txt.
    return -1 if !defined($fresh_until) || $fresh_until < time;

    my $str = $uri->path_query;
    my $rule;
    for $rule ($self->rules($netloc)) {
	return 1 unless length $rule;         # empty Disallow rule allows everything
	return 0 if index($str, $rule) == 0;  # path starts with a disallowed prefix
    }
    return 1;
}


# The following methods must be provided by the subclass.
sub agent;
sub visit;
sub no_visits;
sub last_visit;
sub fresh_until;
sub push_rules;
sub clear_rules;
sub rules;
sub dump;



package WWW::RobotRules::InCore;

use vars qw(@ISA);
@ISA = qw(WWW::RobotRules);



sub agent {
    my ($self, $name) = @_;
    my $old = $self->{'ua'};
    if ($name) {
        # Strip it so that it's just the short name.
        # I.e., "FooBot"                                      => "FooBot"
        #       "FooBot/1.2"                                  => "FooBot"
        #       "FooBot/1.2 [http://foobot.int; foo@bot.int]" => "FooBot"

	$name = $1 if $name =~ m/(\S+)/; # get first word
	$name =~ s!/.*!!;  # get rid of version
	unless ($old && $old eq $name) {
	    delete $self->{'loc'}; # all old info is now stale
	    $self->{'ua'} = $name;
	}
    }
    $old;
}


sub visit {
    my($self, $netloc, $time) = @_;
    return unless $netloc;
    $time ||= time;
    $self->{'loc'}{$netloc}{'last'} = $time;
    my $count = \$self->{'loc'}{$netloc}{'count'};
    if (!defined $$count) {
	$$count = 1;
    }
    else {
	$$count++;
    }
}


sub no_visits {
    my ($self, $netloc) = @_;
    $self->{'loc'}{$netloc}{'count'};
}


sub last_visit {
    my ($self, $netloc) = @_;
    $self->{'loc'}{$netloc}{'last'};
}


sub fresh_until {
    my ($self, $netloc, $fresh_until) = @_;
    my $old = $self->{'loc'}{$netloc}{'fresh'};
    if (defined $fresh_until) {
	$self->{'loc'}{$netloc}{'fresh'} = $fresh_until;
    }
    $old;
}


sub push_rules {
    my($self, $netloc, @rules) = @_;
    push (@{$self->{'loc'}{$netloc}{'rules'}}, @rules);
}


sub clear_rules {
    my($self, $netloc) = @_;
    delete $self->{'loc'}{$netloc}{'rules'};
}


sub rules {
    my($self, $netloc) = @_;
    if (defined $self->{'loc'}{$netloc}{'rules'}) {
	return @{$self->{'loc'}{$netloc}{'rules'}};
    }
    else {
	return ();
    }
}


sub dump
{
    my $self = shift;
    for (keys %$self) {
	next if $_ eq 'loc';
	print "$_ = $self->{$_}\n";
    }
    for (keys %{$self->{'loc'}}) {
	my @rules = $self->rules($_);
	print "$_: ", join("; ", @rules), "\n";
    }
}


1;

__END__


# Bender: "Well, I don't have anything else
#          planned for today.  Let's get drunk!"

=head1 NAME

WWW::RobotRules - database of robots.txt-derived permissions

=head1 SYNOPSIS

 use WWW::RobotRules;
 my $rules = WWW::RobotRules->new('MOMspider/1.0');

 use LWP::Simple qw(get);

 {
   my $url = "http://some.place/robots.txt";
   my $robots_txt = get $url;
   $rules->parse($url, $robots_txt) if defined $robots_txt;
 }

 {
   my $url = "http://some.other.place/robots.txt";
   my $robots_txt = get $url;
   $rules->parse($url, $robots_txt) if defined $robots_txt;
 }

 # Now we can check if a URL is valid for those servers
 # whose "robots.txt" files we've gotten and parsed:
 my $url = "http://some.place/some/page.html";
 if ($rules->allowed($url)) {
     my $c = get $url;
     ...
 }

=head1 DESCRIPTION

This module parses F</robots.txt> files as specified in
"A Standard for Robot Exclusion", at
<http://www.robotstxt.org/wc/norobots.html>.
Webmasters can use the F</robots.txt> file to forbid conforming
robots from accessing parts of their web site.

The parsed files are kept in a WWW::RobotRules object, and this object
provides methods to check if access to a given URL is prohibited.  The
same WWW::RobotRules object can be used for one or more parsed
F</robots.txt> files on any number of hosts.

The following methods are provided:

=over 4

=item $rules = WWW::RobotRules->new($robot_name)

This is the constructor for WWW::RobotRules objects.  The first
argument given to new() is the name of the robot.

=item $rules->parse($robot_txt_url, $content, $fresh_until)

The parse() method takes as arguments the URL that was used to
retrieve the F</robots.txt> file, and the contents of the file.  An
optional third argument, $fresh_until, is the epoch time until which
the parsed rules remain valid; if it is omitted, the rules are kept
fresh for one year.
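
For example, to cache the rules for one day instead of the default
year (a sketch; C<$robots_txt> is assumed to hold the already-fetched
file contents):

  $rules->parse("http://some.place/robots.txt", $robots_txt,
                time + 24*60*60);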

=item $rules->allowed($uri)

Returns TRUE if this robot is allowed to retrieve this URL.  URLs in
schemes other than C<http> and C<https> are always allowed.  If no
rules are cached for the URL's host, or the cached rules have passed
their expiry time, the method returns -1.
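
Because the stale-rules result (-1) is also true in boolean context,
callers that keep a rules object around for a long time may want to
test for it explicitly.  A minimal sketch:

  my $verdict = $rules->allowed($url);
  if ($verdict < 0) {
      # cached rules expired; refetch and reparse robots.txt first
  }
  elsif ($verdict) {
      # allowed; go ahead and fetch $url
  }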

=item $rules->agent([$name])

Get/set the agent name. NOTE: Changing the agent name will clear the robots.txt
rules and expire times out of the cache.
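
For example, only the robot's short name is stored, mirroring the
stripping done in the InCore implementation above:

  $rules->agent('FooBot/1.2 [http://foobot.int; foo@bot.int]');
  print $rules->agent, "\n";   # prints "FooBot"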

=back

=head1 ROBOTS.TXT

The format and semantics of the "/robots.txt" file are as follows
(this is an edited abstract of
<http://www.robotstxt.org/wc/norobots.html>):

The file consists of one or more records separated by one or more
blank lines. Each record contains lines of the form

  <field-name>: <value>

The field name is case insensitive.  Text after the '#' character on a
line is ignored during parsing.  This is used for comments.  The
following <field-names> can be used:

=over 3

=item User-Agent

The value of this field is the name of the robot the record is
describing access policy for.  If more than one I<User-Agent> field is
present the record describes an identical access policy for more than
one robot. At least one field needs to be present per record.  If the
value is '*', the record describes the default access policy for any
robot that has not matched any of the other records.

The I<User-Agent> fields must occur before the I<Disallow> fields.  If a
record contains a I<User-Agent> field after a I<Disallow> field, that
constitutes a malformed record.  This parser will assume that a blank
line should have been placed before that I<User-Agent> field, and will
break the record into two.  All the fields before the I<User-Agent> field
will constitute a record, and the I<User-Agent> field will be the first
field in a new record.

=item Disallow

The value of this field specifies a partial URL that is not to be
visited. This can be a full path, or a partial path; any URL that
starts with this value will not be retrieved.

=back

Unrecognized fields are ignored.
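
For example, in the following record the C<Crawl-delay> line (shown
only for illustration; it is not part of the original standard) is
skipped, while the surrounding fields are still honored:

  User-agent: *
  Crawl-delay: 10
  Disallow: /private/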

=head1 ROBOTS.TXT EXAMPLES

The following example "/robots.txt" file specifies that no robots
should visit any URL starting with "/cyberworld/map/" or "/tmp/":

  User-agent: *
  Disallow: /cyberworld/map/ # This is an infinite virtual URL space
  Disallow: /tmp/ # these will soon disappear

This example "/robots.txt" file specifies that no robots should visit
any URL starting with "/cyberworld/map/", except the robot called
"cybermapper":

  User-agent: *
  Disallow: /cyberworld/map/ # This is an infinite virtual URL space

  # Cybermapper knows where to go.
  User-agent: cybermapper
  Disallow:

This example indicates that no robots should visit this site further:

  # go away
  User-agent: *
  Disallow: /

This is an example of a malformed robots.txt file:

  # robots.txt for ancientcastle.example.com
  # I've locked myself away.
  User-agent: *
  Disallow: /
  # The castle is your home now, so you can go anywhere you like.
  User-agent: Belle
  Disallow: /west-wing/ # except the west wing!
  # It's good to be the Prince...
  User-agent: Beast
  Disallow:

This file is missing the required blank lines between records.
However, the intention is clear.

=head1 SEE ALSO

L<LWP::RobotUA>, L<WWW::RobotRules::AnyDBM_File>

=head1 COPYRIGHT

  Copyright 1995-2009, Gisle Aas
  Copyright 1995, Martijn Koster

This library is free software; you can redistribute it and/or
modify it under the same terms as Perl itself.
