sfjplib for python


File Info

Rev. f6bf83ccbc242faf3fc6fcd7916144eaee220512
Size 2,944 bytes
Time 2011-08-25 20:46:40
Author Hiromichi MATSUSHIMA
Log Message

add some files

Content

#!/usr/bin/python
# -*- coding: utf-8 -*-
"""sfjplib.py"""

import crawlerlib
import urllib
import htmltree
import re

def login(uname, passwd):
    """
    login to sfjp
    uname: username
    passwd: password
    """
    u = SfjpUser(uname, passwd)
    u.login()
    return u
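
# Example (sketch; the username and password below are placeholders):
#   u = login("myname", "secret")   # returns a logged-in SfjpUser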


class SfjpUser(crawlerlib.CrawlUser):
    """sfjp user class"""
    login_url = "https://sourceforge.jp/account/login.php"
    def __init__(self, uname="", passwd=""):
        self._uname = uname
        self._passwd = passwd

    def login(self):
        params = urllib.urlencode({
                "return_to": "/my",
                "login": 1,
                "form_loginname": self._uname,
                "form_pw": self._passwd,
                "submit": "ログイン"
                })
        return crawlerlib.CrawlUser.login(self, params)


class docman2(object):
    """docman2 manipulation library"""
    def __init__(self, sfjp_user):
        self._user = sfjp_user

    def retrive(self, gid, cid):
        """
        retrieve a document from docman
        gid: group_id
        cid: document_content_id
        """
        c = crawlerlib.Crawler(self._user)
        url = "http://sourceforge.jp/docman2/EditDocument.php?group_id=%s&document_content_id=%s" % (gid, cid)
        res = c.get(url)
        return res.read()
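
# Example (sketch; the group_id and document_content_id below are placeholders):
#   doc_html = docman2(login("myname", "secret")).retrive("1234", "5678")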


class Wiki(object):
    """Wiki manipulation library"""
    def __init__(self, sfjp_user=None):
        self._user = sfjp_user

    def retrive_wikitext(self, project_uid, name):
        """
        retrieve (title, wikitext, comment, postkey) for a wiki page
        project_uid: project name as it appears in the project URL
        name: wiki page name
        """
        c = crawlerlib.Crawler(self._user)
        url = "http://sourceforge.jp/projects/%s/wiki/%s?action=edit" % (project_uid, name)
        res = c.get(url)
        html = res.read()

        tree = htmltree.parse(html).root()
        title = tree.get_element_by_id("title").attr("value")
        wikitext = tree.get_element_by_id("text").inner_text()
        comment = tree.get_element_by_id("comment").attr("value")

        # get the postkey, which the page embeds via JavaScript, e.g.:
        #   document.write('<inp'+'ut type="hidden" name="postkey" value="Alqh7yg">');

        m = re.search(r'''type="hidden" name="postkey" value="(.*?)"''', html)
        if m:
            postkey = m.group(1)
        else:
            postkey = ""
        return (title, wikitext, comment, postkey)

    def post_wikitext(self, project_uid, name, title, wikitext, comment, postkey):
        """
        update a wiki page with new title, text and comment
        postkey: the value returned by retrive_wikitext
        """
        c = crawlerlib.Crawler(self._user)
        url = "http://sourceforge.jp/projects/%s/wiki/%s?action=update" % (project_uid, name)
        
        title = title.encode("utf_8")
        wikitext = wikitext.encode("utf_8")
        comment = comment.encode("utf_8")

        params = {
            "title": title,
            "textarea_height": 24,
            "text": wikitext,
            "comment": comment,
            "postkey": postkey
            }
        res = c.post_form(url, params)
        return res
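

# Example round-trip edit of a wiki page (sketch; the project name, page name
# and credentials below are placeholders, not values taken from this file):
#   u = login("myname", "secret")
#   w = Wiki(u)
#   title, text, comment, postkey = w.retrive_wikitext("myproject", "FrontPage")
#   w.post_wikitext("myproject", "FrontPage", title, text + u"\nupdated",
#                   u"edit via sfjplib", postkey)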