changeset 69:7ee3a8421936

merged hychen's proposal.
author "Rex Tsai <chihchun@kalug.linux.org.tw>"
date Wed, 22 Oct 2008 06:25:16 +0800
parents 4ba1e981716d (diff) d26eea95c52d (current diff)
children 142d888af2d1
diffstat 17 files changed, 611 insertions(+), 26 deletions(-)
--- a/README	Tue Oct 21 01:36:28 2008 +0800
+++ b/README	Wed Oct 22 06:25:16 2008 +0800
@@ -15,13 +15,17 @@
 
 = HOWTO =
  * Install *many* perl modules. 
-libclass-dbi-autoloader-perl - Class::DBI::AutoLoader
-libclass-dbi-perl - Class::DBI
-libclass-dbi-sqlite-perl - Class::DBI::SQLite
-libhtml-tagparser-perl - HTML::TagParser;
-libio-compress-zlib-perl - IO::Uncompress::Gunzip
-libwww-perl - LWP
-libwww-mechanize-perl - WWW::Mechanize
+Carp::Assert - libcarp-assert-perl
+Carp::Assert::More - libcarp-assert-more-perl
+Class::DBI::AutoLoader - libclass-dbi-autoloader-perl
+Class::DBI - libclass-dbi-perl
+Class::DBI::SQLite - libclass-dbi-sqlite-perl
+Decision::ParseTree - libdecision-parsetree-perl
+HTML::TagParser - libhtml-tagparser-perl
+IO::Uncompress::Gunzip - libio-compress-zlib-perl
+LWP - libwww-perl
+WWW::Mechanize - libwww-mechanize-perl
+YAML - libyaml-perl
 
  You can download the missing Debian packages from http://www.assembla.com/spaces/eagle-eye/documents
   
@@ -30,13 +34,13 @@
 * Create the config file at ${HOME}/.eagleeye.pm. The file content is:
 package main;
 
-$::server = 's2.ikariam.tw';
+$::server = 's4.ikariam.tw'; # Delta server.
 $::user = "chihchun";
 $::pass = "YOUKNOW";
 
 1;
 
- * Run the bot, 'perl inference.pl'
- * Update the islands by runing 'perl scan.pl'
+ * Run the bot, 'perl agent.pl'
+ * Update the island information by running 'perl scan.pl'
  * List sheep, 'perl sheep.pl'
 
--- a/agent.pl	Tue Oct 21 01:36:28 2008 +0800
+++ b/agent.pl	Wed Oct 22 06:25:16 2008 +0800
@@ -16,7 +16,7 @@
     return bless $self, $class;
 }
 
-sub is_Attacked {
+sub is_attacked {
     my ($self, $city) = @_;
     return ($city->{force}->{attacks} > 0 ) ? 1 : 0;
 }
@@ -43,8 +43,7 @@
     return 0;
 }
 
-sub is_happiness
-{
+sub is_happiness {
     my ($self, $city) = @_;
     # TODO: use fuzzy logic to derive a reasonable happiness value
     return ($city->{happiness} >= 2 ?  1 : 0)
@@ -94,6 +93,26 @@
     return 0;
 }
 
+sub is_expansion_researched {
+    my ($self, $city) = @_;
+    return (defined($city->{research}->{1030}) ?  1 : 0);
+}
+
+sub is_wealth_researched {
+    my ($self, $city) = @_;
+    return (defined($city->{research}->{2030}) ?  1 : 0);
+}
+
+sub is_professionalarmy_researched {
+    my ($self, $city) = @_;
+    return (defined($city->{research}->{4030}) ?  1 : 0);
+}
+
+sub is_paper_researched {
+    my ($self, $city) = @_;
+    return (defined($city->{research}->{3020}) ?  1 : 0);
+}
+
 sub is_drydock_researched {
     my ($self, $city) = @_;
     return (defined($city->{research}->{4010}) ?  1 : 0);
@@ -104,6 +123,25 @@
     return (defined($city->{research}->{2040}) ?  1 : 0);
 }
 
+sub is_invention_researched {
+    my ($self, $city) = @_;
+    return (defined($city->{research}->{3040}) ?  1 : 0);
+}
+
+sub is_barracks_level_enough {
+    my ($self, $city) = @_;
+    return 0 if(!defined($city->{building}->{barracks}));
+    return ($city->{building}->{barracks} >= 3 ? 1 : 0);
+}
+
+sub is_shipyard_level_enough {
+    my ($self, $city) = @_;
+    return 0 if(!defined($city->{building}->{shipyard}));
+    return ($city->{building}->{shipyard} >= 2 ? 1 : 0);
+}
+
 sub rule_engagement
 {
     my ($self, $city) = @_;
@@ -158,7 +196,7 @@
         }
     }
     # Debug
-    print(Dumper($cities->{$cityId}->{parse_path}));
+    # print(Dumper($cities->{$cityId}->{parse_path}));
 }
 
 $i->logout;
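The new is_*_researched predicates above all share one pattern: a research counts as done when its numeric ID is defined in $city->{research}. A table-driven equivalent, sketched in Python purely for illustration (the IDs are copied from the hunks above; this is not part of agent.pl):

RESEARCH_IDS = {
    'expansion': 1030,          # is_expansion_researched
    'wealth': 2030,             # is_wealth_researched
    'paper': 3020,              # is_paper_researched
    'invention': 3040,          # is_invention_researched
    'drydock': 4010,            # is_drydock_researched
    'professionalarmy': 4030,   # is_professionalarmy_researched
}

def is_researched(city, name):
    # mirrors the Perl predicates: defined($city->{research}->{ID}) ? 1 : 0
    return 1 if RESEARCH_IDS[name] in city.get('research', {}) else 0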
--- a/building.yaml	Tue Oct 21 01:36:28 2008 +0800
+++ b/building.yaml	Wed Oct 22 06:25:16 2008 +0800
@@ -1,17 +1,35 @@
 ---
 # Basic construction rules
 # Check whether the city is under attack
-- is_Attacked:
+- is_attacked:
     # we are in Peace :D
     0:
       - is_constructing:
          # already building something.
          1:
            # adjust human resources
+           # 
+           # Related basic research (technology tree)
+           # Wealth
+            - is_wealth_researched:
+               0: research_economy
+            # Expansion
+            - is_expansion_researched:
+               0: research_seafaring
+            # Paper
+            - is_paper_researched:
+               0: research_knowledge
+            # Professional army
+            - is_professionalarmy_researched:
+               0: resaerch_military
+            # Wine press
+            - is_winepress_researched:
+               0: research_economy
+            # Invention
+            - is_invention_researched:
+               0: research_knowledge
          # let's find something to build up
          0:
-            # Check whether this is the capital city
-            # Check whether there is an academy
             - is_gold_enoughforcargo:
                1: increaseTransporter
             - is_wall_enough:
@@ -30,15 +48,58 @@
                   - is_shipyard:
                      0: 
                         - is_drydock_researched: 
-                           0: resaerch_drydock
+                           0: resaerch_military
                            1: build_shipyard
+                           # build one boat
             - is_happiness:
                0: 
                   - is_winepress_researched:
-                     0: research_economy
+                     # 0: research_economy
                      1: build_tavern
-                     # reduce_trvern
-            # This is the capital; any structure not yet built may be constructed freely
+
+            # TODO
+            # http://ikariam.wikia.com/wiki/List_of_buildings
+            # http://ikariam.wikia.com/wiki/Technology_Tree
+            # is_conservation_researched
+            #    -build_warehouse
+            # build_academy
+            # build_palace (Expansion, Capital only)
+            # build_embassy (secondary city, do not build)
+
+            - is_professionalarmy_researched:
+               1:
+                  - is_shipyard_level_enough:
+                     - build_shipyard
+            - is_professionalarmy_researched:
+               1:
+                  - is_barracks_level_enough:
+                     - build_barracks
+            # is_invention_researched
+            # build_workshop
+            # build_hideout
+            #
+            # build_museum
+
+            # Palace
             # Secondary city, do not build ...
     # Take defensive measures
     1: run_defense
+    # If our military score is 0, adopt a scorched-earth policy: for the targeted city, spend all the gold on ships and tear down the trading port
+    # increaseTransporter
+    # destroy TradingPort
+    #
+    # Calculate the number of troops to send, the arrival time, and the redeployment time
+    # Move troops in for protection (add to the blockade list)
+
+    # (Keep forces in reserve) attack the opponent's town, tear down the port, keep gold
+    # (Keep forces in reserve) attack any town, tear down the port, keep gold
+
+# blocking
+# Calculate the military score ratio
+# Opponent military score 0: # navy three times a day, eight hours each # army three times a day, once every eight hours
+# Opponent with a comparable military score: raid in the middle of the night
+# Opponent with a high military score # accumulate points
+#
+# balance resources
+# Move resources from other cities to a specific city
+# Pre-calculate the resources that may be needed
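building.yaml is a nested rule tree: each list item maps a predicate name (an is_* method of the agent) onto branches keyed by the predicate's 0/1 result, and leaves name actions such as research_economy or build_tavern. A minimal walker, sketched in Python only to illustrate how the tree is read; the actual evaluation is done by the Perl Decision::ParseTree module and may differ in detail:

def first_action(rules, agent, city):
    # rules: a list of {predicate_name: {0: ..., 1: ...}} mappings, as in building.yaml
    for rule in rules:
        for predicate, branches in rule.items():
            if not isinstance(branches, dict):
                continue
            branch = branches.get(getattr(agent, predicate)(city))
            if isinstance(branch, list):              # nested sub-rules
                action = first_action(branch, agent, city)
                if action is not None:
                    return action
            elif branch is not None:                  # a leaf names an action
                return branch
    return None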
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikb/Parser.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,51 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import re,string
+from sgmllib import SGMLParser  
+
+class ContentParser(SGMLParser):
+    def __init__(self):
+        SGMLParser.__init__(self)
+        self.anchor =  {'link':'', 'title':''}
+        self.anchorlist = []
+	self.liattr={}
+        self.inside_elements=['site']
+	self.pat=re.compile('\r|\t|\n')
+
+    def start_a(self, attributes):
+        """For each anchor tag, pay attention to the href and title attributes."""
+        href, title = '', ''
+        for name, value in attributes:
+            if name.lower() == 'href': href = value
+            if name.lower() == 'title': title = value
+        self.anchor['link'] = href
+        self.anchor['title'] = title
+        self.inside_elements.append('anchor')
+
+    def end_a(self):
+        self.anchorlist.append(self.anchor) # store the anchor in a list 
+        self.anchor = {'link':'', 'title':''}   # reset the dictionary,  
+        self.inside_elements.pop()
+
+    def handle_data(self, text):
+        if self.inside_elements[-1]=='anchor':
+            self.anchor['title'] = text
+	if self.inside_elements[-1]=='li':
+	    text=self.pat.sub(' ',text)
+	    text=string.split(text," ")
+	    if self.liattcl in self.liattr:
+	    	self.liattr[self.liattcl]=self.liattr[self.liattcl]+text
+	    else:
+	        self.liattr[self.liattcl]=text
+
+    def start_li(self,attributes):
+	self.liattcl=''
+        attrs = dict(attributes)
+	if attrs.has_key('class'):
+	     	self.liattcl=attrs['class']
+		self.inside_elements.append('li')
+
+    def end_li(self):
+	if self.inside_elements[-1]=='li':
+	    self.inside_elements.pop()
+	
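A quick usage sketch for ContentParser (Python 2 only, since sgmllib was removed in Python 3); the HTML snippet and values are made up:

from Parser import ContentParser

html = '<ul><li class="wood"> 120 </li></ul><a href="/town" title="Town">Town hall</a>'
p = ContentParser()
p.feed(html)
p.close()
print p.liattr       # {'wood': ['', '120', '']} -- <li> text split on spaces, keyed by class
print p.anchorlist   # [{'link': '/town', 'title': 'Town hall'}]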
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikb/ikariam.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,85 @@
+#!/usr/bin/python
+# -*- coding: utf-8 -*-
+import os,sys,re,string
+import cookielib,urllib2,urllib # for urlencode
+import time
+from lconf import LoadConfigfile
+from Parser import ContentParser
+
+class connection(object):
+    def __init__(self):
+	self.page=''
+	self.confdata=LoadConfigfile().cd
+	self.baseurl='http://'+self.confdata['server']
+        self.COOKIEFILE = '/tmp/ikcookies.lwp'
+	self.cj = cookielib.LWPCookieJar()
+	if os.path.isfile(self.COOKIEFILE):
+	    self.cj.load(self.COOKIEFILE)
+	opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
+	opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW; rv:1.8.1.12pre) Gecko/20071220 BonEcho/2.0.0.12pre')]
+	urllib2.install_opener(opener)
+
+    def login(self):
+        if not os.path.isfile(self.COOKIEFILE):
+	    print "create cookie file"+self.COOKIEFILE
+	    params = {"universe":self.confdata['server'], \
+	    "name":self.confdata['user'], \
+	    "password":self.confdata['pass']}
+
+            data = urllib.urlencode(params)
+            self.page=urllib2.urlopen(self.baseurl+'/index.php?action=loginAvatar&function=login',data).read()
+	self.cj.save(self.COOKIEFILE)
+	return 1
+
+    def parser(self):
+        parser=ContentParser()
+        parser.feed(self.page)
+        parser.close()
+	for x in parser.liattr.keys():
+	    print x,parser.liattr[x]
+	#parser.anchorlist:
+
+    def logout(self):
+        logout=urllib2.urlopen(self.baseurl+'/index.php?action=loginAvatar&function=logout').read()
+	os.remove(self.COOKIEFILE)
+	return 1
+
+    def plunder(self):
+    	'/index.php?view=plunder&destinationCityId=1978'
+
+
+    def upgradetest(self):
+        urllib2.urlopen(self.baseurl+'/index.php?view=academy&id=117257&position=9').read()
+	params = {"action":'CityScreen', \
+	          "function":'upgradeBuilding', \
+		  "id":'117257',\
+		  "position":'9',\
+		  "level":'7',\
+		  "oldView":'academy'}
+	print urllib2.urlopen(self.baseurl+'/index.php?view=townHall&id=117257&position=0#upgrade',urllib.urlencode(params)).read()
+	return 1
+
+def help():
+        print ("Usage: %s [Option] [Channel] [second]") % os.path.basename(sys.argv[0])
+        print ("Option: ")
+	helplist=[
+	("-h","--help","show this usage message."),
+	("-g","--game","Login to the game")
+	]
+	helplist.sort()
+	for x in helplist:
+	    print ("\t%2s, %-25s %s" % x)
+
+if __name__=='__main__':
+    if len(sys.argv) == 1:
+	help()
+	sys.exit(2) # common exit code for syntax error
+    else:
+	arglist=sys.argv[1:]
+	if arglist[0] in ('--game','-g'):
+	     gc=connection()
+	     gc.login()
+	     gc.parser()
+	     gc.logout()
+
+
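The __main__ block above is the intended entry point; the same flow can be driven from another script (a sketch equivalent to running 'python ikariam.py --game'):

from ikariam import connection

c = connection()
c.login()    # logs in (or reuses /tmp/ikcookies.lwp) and saves the cookie jar
c.parser()   # dumps whatever <li class="..."> data ContentParser collected from the login page
c.logout()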
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikb/lconf.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,15 @@
+import os,string
+class LoadConfigfile(object):
+    def __init__(self):
+	profile = os.environ["HOME"]+'/.eagleeye.pm'
+	self.cd={}
+	if os.path.isfile(profile):
+	    print "Loading Config file."
+	    cfile=open(profile,'r')
+	    for line in cfile.xreadlines():
+	    	if line[0:3]=='$::':
+		   con=string.split(line[3:-2])
+		   self.cd[con[0]]=con[2][1:-1]	   
+	else:
+            print "File doesn't exist."
+
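Assuming ${HOME}/.eagleeye.pm contains plain assignments like those in the README (the [3:-2] slice expects each line to end with ';' and carry no trailing comment), LoadConfigfile exposes them as a dict:

from lconf import LoadConfigfile

conf = LoadConfigfile().cd
print conf            # e.g. {'server': 's4.ikariam.tw', 'user': 'chihchun', 'pass': 'YOUKNOW'}
print conf['server']  # s4.ikariam.tw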
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikriam/__init__.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,54 @@
+from lazy.www import c
+from lconf import LoadConfigfile
+import cookielib
+import os
+import urllib2
+import urllib
+class Ikariam:
+
+    cities = {}
+
+    def __init__(self):
+        self.COOKIEFILE = '/tmp/ikariam.lwp'
+	self.confdata=LoadConfigfile().cd
+        self.baseurl='http://'+self.confdata['server']
+        self.cj = cookielib.LWPCookieJar()
+        if os.path.isfile(self.COOKIEFILE):
+            self.cj.load(self.COOKIEFILE)
+ 
+        opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(self.cj))
+        opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW; rv:1.8.1.12pre) Gecko/20071220 BonEcho/2.0.0.12pre')]
+        urllib2.install_opener(opener)
+
+        self.login()
+
+    def login(self):     
+	print "login to %s...." % self.confdata['server']
+        params = {"universe":self.confdata['server'], \
+        "name":self.confdata['user'], \
+        "password":self.confdata['pass']}
+        ret = c(self.baseurl+'/index.php?action=loginAvatar&function=login').get(params).get_content()
+        self.cj.save(self.COOKIEFILE)
+        
+    def logout(self):
+        print "logout from %s...." % self.confdata['server']
+        c(self.baseurl+'/index.php?action=loginAvatar&function=logout')
+        os.remove(self.COOKIEFILE)
+        
+    def city(self, id):
+        return self.cities.setdefault(id, IkariamCity(id=id, core=self))
+    
+class IkariamCity:
+    
+    def __init__(self, id, core ):
+        self.core = core
+        self.id = id
+        self.params = {'view':'city','id':id}
+        
+    def sync(self):
+        print "pull data for city %s" % self.id
+        xpath_globalinfo = "/html/body[@id='city']/div[@id='container']/div[@id='container2']/div[@id='globalResources']/ul"
+
+        xpath_gold = xpath_globalinfo + "/li[2]/a/span[@id='value_gold']/text()"
+        self.gold = c(self.core.baseurl).get(self.params).find(xpath_gold).get_content()[0]
+        
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikriam/example.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,6 @@
+from __init__ import Ikariam
+
+i = Ikariam()
+city = i.city(117261)
+city.sync()
+print 'gold is ' + city.gold
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikriam/lazy/www/README	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,18 @@
+Requirements:
+
+	lxml - python libxml binding
+
+	you need to install the following packages before installing lxml.
+
+    *  libxml 2.6.21 or later. It can be found here: http://xmlsoft.org/downloads.html
+    *  libxslt 1.1.15 or later. It can be found here: http://xmlsoft.org/XSLT/downloads.html
+		
+	If you use Ubuntu, here is what you need to do.
+	
+	$ apt-get install libxml2-dev libxslt1-dev
+	$ easy_install lxml
+
+Example:
+
+	product = c('http://www.google.com.tw').find("/foo/bar/").working_product
+	print product.content
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikriam/lazy/www/__init__.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,64 @@
+# -*- coding: utf-8 -*-
+#
+# Copyright 2008 Hsin Yi, Chen
+"""
+    [Note] the project is not available yet.
+
+    A web page fetching tool chain with a jQuery-like selector that supports chained operations.
+    
+    Here is an example that shows the main idea: retrieve the content you want
+    from a div box in a web page, then post it and retrieve the next content from
+    another web page using a parameter built from the first retrieval, and
+    finally store the product.
+    
+    def func(s):
+        return {'msg':s}
+    
+    try:
+        c("http://example.tw/").get().find("#id > div") \
+            .build_param( func ).post_to("http://example2.com") \
+            .save_as('hellow.html')
+    except:
+        pass
+        
+    A more complex example:
+        
+    try:
+        c("http://example.tw/").retry(4, '5m').get() \
+            .find("#id > div"). \
+            .build_param( func ).post_to("http://example2.com") \
+            .save_as('hellow.html') \
+            .end().find("#id2 > img").download('pretty-%s.jpg'). \
+            tar_and_zip("pretty_girl.tar.gz")
+    except NotFound:
+        print "the web page is not found."
+    except NoPermissionTosave:
+        print "the files can not be save with incorrect permission."
+    else:
+        print "unknow error."
+"""
+from lazy.www.work import WorkFlow
+from lazy.www.work.fetch import Fetcher, install_opener
+from lazy.www.core import SemiProduct
+
+def c(url):
+    """
+    connect to a web page
+    
+    >>> c('http://localhost:8080').get().worker.working_product.content
+    'It works!!\\n'
+    
+    >>> c('http://localhost:8080').get().find('//text()').get_content()
+    ['It works!!\\n']
+    """
+    s= SemiProduct(source=url)    
+    w = WorkFlow(Fetcher(s))
+    return w
+
+def lz_install(**kwds):
+    if('opener' == kwds.get('name')):
+       install_opener(kwds.get('cookiefile'))
+
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod()
\ No newline at end of file
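A hedged usage sketch for lz_install together with c(); like the doctests above, it assumes a local test server on port 8080:

from lazy.www import c, lz_install

# install a cookie-aware urllib2 opener before chaining requests
lz_install(name='opener', cookiefile='/tmp/ikariam.lwp')
print c('http://localhost:8080').get().get_content()   # 'It works!!\n'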
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikriam/lazy/www/core/__init__.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,12 @@
+                          
+class SemiProduct:
+
+    last_work = None
+    source = None
+    content = None
+    
+    def __init__(self, **kwds):
+        self.source = kwds.get('source','')        
+        
+    def __str__(self):        
+        return self.content
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikriam/lazy/www/core/utils.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,4 @@
+
+def mix_in(py_class, mixin_class):
+    if mixin_class not in py_class.__bases__:
+        py_class.__bases__ += (mixin_class,)
\ No newline at end of file
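A small usage sketch for mix_in: graft a mixin onto an existing old-style class (as used throughout this package) at runtime. The class names are made up:

from lazy.www.core.utils import mix_in

class Greeter:
    def hello(self):
        return "hello from %s" % self.__class__.__name__

class Base:
    pass

mix_in(Base, Greeter)
print Base().hello()   # hello from Base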
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikriam/lazy/www/work/__init__.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,52 @@
+from lazy.www.work.fetch import Fetcher
+from lazy.www.work.find import Finder
+from lazy.www.core import SemiProduct
+class WorkFlow:
+    
+    serial_number = 0
+    working_product = None
+    worker = None
+    
+    def __init__(self, worker):
+        self.set_worker(worker)
+
+    def set_worker(self, worker):
+        self.worker = worker
+        if self.worker.working_product is None:
+            self.working_product = SemiProduct()
+        else:
+            self.working_product = self.worker.working_product
+    
+    def get_content(self):
+        return self.working_product.content
+     
+    def change_worker(self, new_worker):
+        self.serial_number += 1
+        self.worker = new_worker
+        
+    def is_fetcher(self, obj):
+        if  obj is not None:    return True
+    
+    def get(self, data = {} ):
+        if not self.is_fetcher(self.worker) :
+            self.change_worker( Fetcher(self.working_product) )
+        
+        self.working_product.content = self.worker.get(data)
+        return self
+            
+    def post(self, data = {} ):
+        if not self.is_fetcher(self.worker):
+            self.change_worker( Fetcher(self.working_product) )
+        
+        self.working_product.content = self.worker.post(data)
+        return self
+    
+    def is_finder(self, obj):
+        if obj is not None: return True
+    
+    def find(self, express):
+        #if not self.is_finder(self.worker):
+        self.worker = Finder(self.working_product)
+        self.working_product.content = self.worker.find(express)
+        
+        return self
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikriam/lazy/www/work/fetch.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,83 @@
+import urllib2
+import urllib
+import cookielib
+import os
+
+def install_opener(cookiefile):
+    COOKIEFILE = cookiefile
+    cj = cookielib.LWPCookieJar()
+    if os.path.isfile(COOKIEFILE):
+        cj.load(COOKIEFILE)
+    else:
+        cj.save(COOKIEFILE)
+    # build and install the cookie-aware opener whether or not the cookie file already existed
+    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
+    opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows; U; Windows NT 5.1; zh-TW; rv:1.8.1.12pre) Gecko/20071220 BonEcho/2.0.0.12pre')]
+    urllib2.install_opener(opener)
+
+class Fetcher:
+    
+    opener = None
+    
+    working_product = None
+    
+    """
+    A SemiProduct decoration for content fetching.
+    
+    Handles content retrieval.
+    
+    >>> o = Fetcher( SemiProduct(source="http://localhost:8080") )
+    >>> o.get().working_product.content
+    'It works!!\\n'
+    """
+    def __init__(self, working_product):
+        self.working_product = working_product
+        
+    def get(self, data = {}):
+        """        
+        send data via an HTTP GET-style request.
+        """        
+        # note: because a data argument is passed, urllib2.urlopen actually issues a POST here
+        res = urllib2.urlopen(self.working_product.source, urllib.urlencode(data))
+        return res.read()
+    
+    def post(self, data = {} ):
+        """
+        send data via the HTTP POST method.
+        
+        >>> o = Fetcher( SemiProduct(source="http://localhost:8080") )
+        >>> o.post({'a':'b'}).working_product.content
+        'It works!!\\n'
+        """        
+        res = urllib2.urlopen(self.working_product.source, urllib.urlencode(data))
+        return res.read()    
+
+    def refer(self, refer_url):
+        """
+        refer getter/setter.
+
+        >>> o = Fetcher( SemiProduct(source="http://localhost:8080") )
+        >>> o.refer('http://www.example.com')        
+        """
+        raise NotImplementedError
+
+    def retry(self, count = 0, intval = 0, timeout = 0):
+        """
+        retry to fetch the content.
+
+        >>> o = Fetcher( SemiProduct(source="http://localhost:8080") )
+        >>> o.retry(4)        
+        """        
+        raise NotImplementedError
+    
+class Retry:
+    
+    """
+    A Fetcher decoration that retries failed fetches (not implemented yet).
+    """
+    def __init__(self, fetcher):
+        raise NotImplementedError
+    
+if __name__ == '__main__':
+    import doctest
+    doctest.testmod()
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikriam/lazy/www/work/find.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,23 @@
+from lxml import etree
+from cStringIO import StringIO
+
+class Finder:
+
+    dom_tree = None
+    xpath = None
+
+    def __init__(self, working_product):
+        self.working_product = working_product
+
+        self.encoding = 'utf8'
+        parser = etree.HTMLParser(encoding=self.encoding)
+        self.dom_tree = etree.parse(StringIO(self.working_product.content), parser)
+    
+    def find(self, express , callback = None):
+        xpath = self.dom_tree.xpath(express)
+        
+        if callback is None:
+            ret = xpath
+        else:
+            ret = callback(xpath)
+        return ret
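A standalone usage sketch for Finder: wrap already-fetched HTML in a SemiProduct and run an XPath query over it (the HTML string is made up):

from lazy.www.core import SemiProduct
from lazy.www.work.find import Finder

s = SemiProduct(source='http://localhost:8080')
s.content = '<html><body><div id="gold">1234</div></body></html>'
print Finder(s).find("//div[@id='gold']/text()")   # ['1234']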
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/pyikriam/lconf.py	Wed Oct 22 06:25:16 2008 +0800
@@ -0,0 +1,15 @@
+import os,string
+class LoadConfigfile(object):
+    def __init__(self):
+	profile = os.environ["HOME"]+'/.eagleeye.pm'
+	self.cd={}
+	if os.path.isfile(profile):
+	    print "Loading Config file."
+	    cfile=open(profile,'r')
+	    for line in cfile.xreadlines():
+	    	if line[0:3]=='$::':
+		   con=string.split(line[3:-2])
+		   self.cd[con[0]]=con[2][1:-1]	   
+	else:
+            print "File doesn't exist."
+
--- a/sheep.pl	Tue Oct 21 01:36:28 2008 +0800
+++ b/sheep.pl	Wed Oct 22 06:25:16 2008 +0800
@@ -37,7 +37,7 @@
         SELECT user.id 
           FROM user, cities 
          WHERE user.id == cities.user 
-           AND user.trader_score_secondary >= 20
+           AND user.trader_score_secondary >= 200
            AND user.army_score_main <= 100
            AND cities.island IN (SELECT island.id FROM island WHERE island.x <= ? AND island.x >= ? AND island.y <= ? AND island.y >= ? )
     }
@@ -73,18 +73,18 @@
             next if($c->status eq 'v');
 
             unless($c->status eq 'i') {
-                next if($members > 3);
+                next if($members > 5);
             }
 
             my $island = Ikariam::Island->retrieve($c->island);
 
             # Gold gained = target town level x (target town level - 1) x target's gold / 10000
-            my $robbery = $c->citylevel * ($c->citylevel - 1) * $sheep->trader_score_secondary / 10000;
+            my $capture = $c->citylevel * ($c->citylevel - 1) * $sheep->trader_score_secondary / 10000;
 
-            next if($robbery < 2000);
+            next if($capture < 100);
 
             $line = sprintf("%d %s army %d %s/%s(%d),", 
-                $robbery,
+                $capture,
                 $c->status, $sheep->army_score_main, $sheep->name, $sheep->ally, $members);
 
             $line .= sprintf("\"%s\" %d [%d,%d] %s http://s2.ikariam.tw/index.php?view=island&id=%d&selectCity=%d\n",
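For reference, the capture formula from the comment above, worked through with made-up numbers (a level-10 town whose owner holds 500,000 gold):

# gold captured = town level * (town level - 1) * target's gold / 10000
level, target_gold = 10, 500000
print level * (level - 1) * target_gold / 10000   # 4500 -- comfortably above the new >= 100 cutoff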