CustomFeed::DeliciousBackup

Net::Deliciousでall_posts取得してるだけです.データ量にもよるけど、まとめて取ってくるのでFetch時間かかります.取得したらStore::DBIC等で保存しておけば良いかと.

package Plagger::Plugin::CustomFeed::DeliciousBackup;
use strict;
use warnings;
use base qw( Plagger::Plugin );
use Plagger::Tag;
use Net::Delicious;

# Hook this plugin into Plagger's "subscription.load" phase so that
# load() runs when Plagger gathers its subscriptions.
sub register {
    my ($self, $context) = @_;
    $context->register_hook($self, 'subscription.load' => \&load);
}

# subscription.load handler: registers one pseudo-feed whose aggregator
# callback (aggregate) performs the actual fetch from del.icio.us.
sub load {
    my ($self, $context) = @_;

    my $backup_feed = Plagger::Feed->new;
    $backup_feed->aggregator(sub { $self->aggregate(@_) });
    $context->subscription->add($backup_feed);
}

# Fetch every bookmark for the configured account via
# Net::Delicious->all_posts() and publish them all as a single
# Plagger feed on the update pipeline.
#
# Config keys read: username, password.
# NOTE(review): per the header prose, all_posts() pulls the entire
# bookmark history in one request, so the fetch can be slow for
# accounts with many bookmarks.
sub aggregate {
    my ($self, $context, $args) = @_;

    my $user   = $self->conf->{username};
    my $client = Net::Delicious->new({
        user => $user,
        pswd => $self->conf->{password},
    });

    # One synthetic feed that carries the whole backup.
    my $feed = Plagger::Feed->new;
    $feed->type('del.icio.us:BackUp');
    $feed->url('http://del.icio.us/rss/' . $user);
    $feed->title('del.icio.us:BackUp:' . $user);

    for my $post ($client->all_posts()) {
        my $entry = Plagger::Entry->new;
        $entry->title($post->description);
        $entry->link($post->href);
        $entry->author($post->user()->name);
        # del.icio.us timestamps are W3CDTF-formatted.
        $entry->date(Plagger::Date->parse('W3CDTF', $post->time));
        $entry->tags([ Plagger::Tag->parse($post->tags) ]);
        $entry->body($post->extended());
        $feed->add_entry($entry);
    }

    $context->update->add($feed);
}

1;