
hwnd
User
Apr 18, 2013, 5:33 PM
Post #45 of 45
(10766 views)
|
Re: [FishMonger] Reading results back that were dumped?
[In reply to]
|
Can't Post
|
|
Ok, so I have managed to get everything working. Thanks for the help so far everyone, I have learned multiple ways of doing this.

#!/usr/bin/perl
# Paginated listing of the `news` table: shows PER_PAGE rows per page with
# Prev/Next links, driven by the ?page= CGI parameter.
use strict;
use warnings;

use constant PER_PAGE => 10;    # rows shown per page

use lib qw( /home/88/64/2016488/lib/perl5/lib/perl5 );
use hwnd;                       # project module providing ->connect (DBI wrapper)
use CGI;
use Data::Page;

my $q = CGI->new;
my $h = hwnd->new;

# Untrusted CGI input: read it once, then accept only a positive integer.
# Anything else (missing, empty, 'abc', '-3') falls back to page 1 instead of
# feeding garbage into Data::Page and the numeric comparisons below.
my $page_id = defined $q->param('page') ? $q->param('page') : 1;
$page_id = 1 unless $page_id =~ /\A[0-9]+\z/ && $page_id >= 1;

# NOTE(review): credentials are masked in this post; presumably they are
# filled in from configuration in the real deployment.
my $dbh = $h->connect('DBI:mysql:*****:*****', '*****', '*****');

# Keyed on rec_id: { rec_id => { rec_id => ..., rec_date => ..., rec_head => ... } }
my $href = $dbh->selectall_hashref(
    q/SELECT rec_id, rec_date, rec_head FROM news/,
    q/rec_id/,
);

# Numeric sort of the record ids; empty list if the query returned nothing.
my @keys = defined $href ? sort { $a <=> $b } keys %$href : ();

my $page  = Data::Page->new( scalar @keys, PER_PAGE, $page_id );
my @items = $page->splice( \@keys );    # just the ids for the current page

print $q->header;
print $q->h1( "Page $page_id (of ", $page->last_page, ')' );

my $i = 0;
foreach my $id (@items) {
    print ++$i, ": $id, $href->{$id}{rec_date}, $href->{$id}{rec_head}\n";
}

# previous_page/next_page return undef at the boundaries, so the links
# vanish on the first and last page respectively.
print $q->a( { -href => '?page=' . $page->previous_page }, 'Prev' )
    if $page->previous_page;
print $q->a( { -href => '?page=' . $page->next_page }, 'Next' )
    if $page->next_page;
(This post was edited by hwnd on Apr 18, 2013, 5:39 PM)
|