ویکی‌پدیا:درخواست‌های ربات/ربات میان‌ویکی یک‌طرفه

این ربات از ابتدای ویکی‌فا همهٔ مقالات را اسکن می‌کند و مقالاتی را که در ویکی‌فا میان‌ویکی دارند ولی در ویکی‌انگلیسی پیوند به ویکی‌فا ندارند در یک فایل متنی ذخیره می‌کند <syntaxhighlight lang="python">

#!/usr/bin/python
# -*- coding: utf-8 -*-
# Reza(User:reza1615), 2011
# Distributed under the terms of the CC-BY-SA 3.0 .

import wikipedia,sys import pagegenerators import re, os, codecs, catlib,pprint wikipedia.config.put_throttle = 0 wikipedia.put_throttle.setDelay() enlink,falink = u' ',u' ' startpage='!' enWikisrcSite = wikipedia.getSite( 'fa' ) msg=u'ربات'

def run( self ):

       trovato_en = False
       sen = wikipedia.Site( 'fa' )
       interwiki_list = []
       for page in self.generator:
           print page
           try:
               try:
                   if not page.canBeEdited():
                       wikipedia.output( u'Skipping locked page %s' % page.title() )
                       continue
                   atext = page.get() # Taking the text of the page
               except wikipedia.NoPage: # First except, prevent empty pages
                       continue
               except wikipedia.IsRedirectPage: # second except, prevent redirect
                       wikipedia.output( u'%s is a redirect!' % pagename )
                       continue
               except wikipedia.Error: # third exception, take the problem and print
                       wikipedia.output( u"Some error, skipping.." )
                       continue     

               if atext.find( u'[[en:' ) == -1:
                       continue    
               else:
                       enlink = atext.split( '[[en:' )[1]
                       enlink = enlink.split( ']]' )[0]
                       site = wikipedia.getSite( u'en' )
               try:
                           enpage = wikipedia.Page( site,enlink )
                           text = enpage.get()
               except wikipedia.IsRedirectPage:
                           enpage = enpage.getRedirectTarget()
               try:
                   text = enpage.get()
               except wikipedia.NoPage: # First except, prevent empty pages
                       continue
               except wikipedia.Error: # third exception, take the problem and print
                       continue
               except:
                       continue


               if text.find( u'[[fa:' )>0:
                           continue
               page=str(page).replace('','').replace('',)
               textresult = 'en:'+enlink+' ==> '+'fa:'+page+'\n'
               print textresult 
               with codecs.open( 'resultr.txt',mode = 'a',encoding = 'utf8' ) as f:
                                  f.write( textresult )  
               f.close()
           except:
               continue       

class Boteditor:
    """Thin holder that binds a page generator for run().

    The autoTitle/autoText flags are accepted for interface
    compatibility but are not stored or used.
    """

    def __init__( self, generator, autoTitle = False, autoText = False ):
        # Keep only the generator; run() iterates self.generator.
        self.generator = generator


def main():

   gen = None
   # summary message
   summary_commandline = None
   # Don't edit pages which contain certain texts.
   exceptions = []
   # commandline paramater.
   # Which namespaces should be processed?
   # default to [] which means all namespaces will be processed
   namespaces = []
   template = None
   PageTitles = []
   autoText = False
   autoTitle = False
   bloga=False
   sourcea=False
   # This factory is responsible for processing command line arguments
   # that are also used by other scripts and that determine on which pages
   # to work on.
   genFactory = pagegenerators.GeneratorFactory()
   # Load default summary message.
   # BUG WARNING: This is probably incompatible with the -lang parameter.
   wikipedia.setAction( msg )

   # Read commandline parameters.
   #-------------------------------------------------------------------------------------------------
   for arg in wikipedia.handleArgs():
       if arg == '-autotitle':
           autoTitle = True
       elif arg == '-autotext':
           autoText = True
       elif arg.startswith( '-page' ):
           if len( arg ) == 5:
               PageTitles.append( wikipedia.input( u'Which page do you want to chage?' ) )
           else:
               PageTitles.append( arg[6:] )
       elif arg.startswith( '-except:' ):
           exceptions.append( arg[8:] )
       elif arg.startswith( '-blog:' ):
           bloga=True
       elif arg.startswith( '-source:' ):
           sourcea=True
       elif arg.startswith( '-template:' ):
           template = arg[10:]
       elif arg.startswith( '-namespace:' ):
           namespaces.append( int( arg[11:] ) )
       elif arg.startswith( '-summary:' ):
           wikipedia.setAction( arg[9:] )
           summary_commandline = True
       else:
           generator = genFactory.handleArg( arg )
           if generator:
               gen = generator
   print namespaces

   if PageTitles:
       pages = [wikipedia.Page( wikipedia.getSite(),PageTitle ) for PageTitle in PageTitles]
       gen = iter( pages )
   if not gen:
       # syntax error, show help text from the top of this file
       wikipedia.showHelp( 'behsaz' )
       wikipedia.stopme()
       sys.exit()
   if namespaces != []:
       gen = pagegenerators.NamespaceFilterPageGenerator( gen,namespaces )
   #gen = pagegenerators.RedirectFilterPageGenerator(gen)

  1. -------------------------------------------------------------------------------------------------
   preloadingGen = pagegenerators.PreloadingGenerator( gen,pageNumber = 60 )
   bot = Boteditor( preloadingGen,autoTitle,autoText)
   run(bot)

if __name__ == "__main__":

   bloga=False
   sourcea=False
   main()